effect (stringclasses: 48 values) | original_source_type (stringlengths: 0 to 23k) | opens_and_abbrevs (listlengths: 2 to 92) | isa_cross_project_example (bool: 1 class) | source_definition (stringlengths: 9 to 57.9k) | partial_definition (stringlengths: 7 to 23.3k) | is_div (bool: 2 classes) | is_type (null) | is_proof (bool: 2 classes) | completed_definiton (stringlengths: 1 to 250k) | dependencies (dict) | effect_flags (sequencelengths: 0 to 2) | ideal_premises (sequencelengths: 0 to 236) | mutual_with (sequencelengths: 0 to 11) | file_context (stringlengths: 0 to 407k) | interleaved (bool: 1 class) | is_simply_typed (bool: 2 classes) | file_name (stringlengths: 5 to 48) | vconfig (dict) | is_simple_lemma (null) | source_type (stringlengths: 10 to 23k) | proof_features (sequencelengths: 0 to 1) | name (stringlengths: 8 to 95) | source (dict) | verbose_type (stringlengths: 1 to 7.42k) | source_range (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
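Each row below is one raw record in the schema above; the long string cells (for example file_context, which holds the F* module text preceding the definition) span many lines. The following is a minimal sketch, not part of the original page, of how such records could be inspected once exported to a local JSON-lines file; the file name fstar_rows.jsonl is a hypothetical placeholder, and only the field names from the header row are assumed.

```python
import json

# Minimal sketch (assumption): read one exported row and print a few fields.
# "fstar_rows.jsonl" is a hypothetical placeholder path, not taken from this page.
with open("fstar_rows.jsonl", "r", encoding="utf-8") as f:
    record = json.loads(f.readline())

# Field names follow the schema in the header row above.
print(record["name"])               # fully qualified name, e.g. Lib.IntTypes.rotate_right_i
print(record["effect"])             # effect label, e.g. Prims.Tot
print(record["file_name"])          # e.g. Lib.IntTypes.fsti
print(record["source_definition"])  # the `let ...` text of the definition
print(len(record["file_context"]))  # size of the preceding file contents (can be very large)
```

The sketch uses only the Python standard library, so it stays independent of any particular hosting or loader API.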
Prims.Tot | val rotate_right_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rotate_right_i (#t:inttype) (#l:secrecy_level) (s:rotval t{unsigned t}) (u:uint_t t l) : uint_t t l = rotate_right u s | val rotate_right_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l
let rotate_right_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l = | false | null | false | rotate_right u s | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.secrecy_level",
"Lib.IntTypes.rotval",
"Prims.b2t",
"Lib.IntTypes.unsigned",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.rotate_right"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b))
let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b
val logor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logor` b) == v a `logor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val lognot: #t:inttype -> #l:secrecy_level -> int_t t l -> int_t t l
val lognot_lemma: #t: inttype -> #l: secrecy_level ->
a: int_t t l ->
Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (lognot a) == 0 else v (lognot a) == ones_v t))
let lognot_v (#t:inttype) (a:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a
val lognot_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> Lemma (v (lognot a) == lognot_v (v a))
inline_for_extraction
type shiftval (t:inttype) = u:size_t{v u < bits t}
inline_for_extraction
type rotval (t:inttype) = u:size_t{0 < v u /\ v u < bits t}
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_right: #t:inttype -> #l:secrecy_level
-> int_t t l
-> shiftval t
-> int_t t l
val shift_right_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:shiftval t
-> Lemma
(v (shift_right a b) == v a / pow2 (v b))
[SMTPat (v #t #l (shift_right #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> s:shiftval t
-> Pure (int_t t l)
(requires unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t))
(ensures fun _ -> True)
val shift_left_lemma:
#t:inttype
-> #l:secrecy_level
-> a:int_t t l{unsigned t \/ 0 <= v a}
-> s:shiftval t{unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t)}
-> Lemma
(v (shift_left a s) == (v a * pow2 (v s)) @%. t)
[SMTPat (v #t #l (shift_left #t #l a s))]
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_right: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
inline_for_extraction
let shift_right_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_right u s
inline_for_extraction
let shift_left_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_left u s | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val rotate_right_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [] | Lib.IntTypes.rotate_right_i | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Lib.IntTypes.rotval t {Lib.IntTypes.unsigned t} -> u129: Lib.IntTypes.uint_t t l
-> Lib.IntTypes.uint_t t l | {
"end_col": 122,
"end_line": 714,
"start_col": 106,
"start_line": 714
} |
Prims.Tot | val shift_left_i (#t: inttype) (#l: secrecy_level) (s: shiftval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let shift_left_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_left u s | val shift_left_i (#t: inttype) (#l: secrecy_level) (s: shiftval t {unsigned t}) (u: uint_t t l)
: uint_t t l
let shift_left_i (#t: inttype) (#l: secrecy_level) (s: shiftval t {unsigned t}) (u: uint_t t l)
: uint_t t l = | false | null | false | shift_left u s | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.secrecy_level",
"Lib.IntTypes.shiftval",
"Prims.b2t",
"Lib.IntTypes.unsigned",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.shift_left"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b))
let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b
val logor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logor` b) == v a `logor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val lognot: #t:inttype -> #l:secrecy_level -> int_t t l -> int_t t l
val lognot_lemma: #t: inttype -> #l: secrecy_level ->
a: int_t t l ->
Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (lognot a) == 0 else v (lognot a) == ones_v t))
let lognot_v (#t:inttype) (a:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a
val lognot_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> Lemma (v (lognot a) == lognot_v (v a))
inline_for_extraction
type shiftval (t:inttype) = u:size_t{v u < bits t}
inline_for_extraction
type rotval (t:inttype) = u:size_t{0 < v u /\ v u < bits t}
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_right: #t:inttype -> #l:secrecy_level
-> int_t t l
-> shiftval t
-> int_t t l
val shift_right_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:shiftval t
-> Lemma
(v (shift_right a b) == v a / pow2 (v b))
[SMTPat (v #t #l (shift_right #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> s:shiftval t
-> Pure (int_t t l)
(requires unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t))
(ensures fun _ -> True)
val shift_left_lemma:
#t:inttype
-> #l:secrecy_level
-> a:int_t t l{unsigned t \/ 0 <= v a}
-> s:shiftval t{unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t)}
-> Lemma
(v (shift_left a s) == (v a * pow2 (v s)) @%. t)
[SMTPat (v #t #l (shift_left #t #l a s))]
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_right: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
inline_for_extraction
let shift_right_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_right u s | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val shift_left_i (#t: inttype) (#l: secrecy_level) (s: shiftval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [] | Lib.IntTypes.shift_left_i | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Lib.IntTypes.shiftval t {Lib.IntTypes.unsigned t} -> u125: Lib.IntTypes.uint_t t l
-> Lib.IntTypes.uint_t t l | {
"end_col": 120,
"end_line": 711,
"start_col": 106,
"start_line": 711
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128 | let bits = | false | null | false | function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128 | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Prims.int"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold | false | true | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bits : _: Lib.IntTypes.inttype -> Prims.int | [] | Lib.IntTypes.bits | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Lib.IntTypes.inttype -> Prims.int | {
"end_col": 15,
"end_line": 69,
"start_col": 11,
"start_line": 58
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1 | let maxint (t: inttype) = | false | null | false | if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1 | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.unsigned",
"Prims.op_Subtraction",
"Prims.pow2",
"Lib.IntTypes.bits",
"Prims.bool",
"Prims.int"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold | false | true | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val maxint : t: Lib.IntTypes.inttype -> Prims.int | [] | Lib.IntTypes.maxint | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | t: Lib.IntTypes.inttype -> Prims.int | {
"end_col": 65,
"end_line": 80,
"start_col": 2,
"start_line": 80
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t | let int_t (t: inttype) (l: secrecy_level) = | false | null | false | match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.secrecy_level",
"Lib.IntTypes.pub_int_t",
"Lib.IntTypes.sec_int_t"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction | false | true | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val int_t : t: Lib.IntTypes.inttype -> l: Lib.IntTypes.secrecy_level -> Type0 | [] | Lib.IntTypes.int_t | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | t: Lib.IntTypes.inttype -> l: Lib.IntTypes.secrecy_level -> Type0 | {
"end_col": 22,
"end_line": 149,
"start_col": 2,
"start_line": 147
} |
|
Prims.Tot | val pub_int_v (#t: _) (x: pub_int_t t) : range_t t | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x | val pub_int_v (#t: _) (x: pub_int_t t) : range_t t
let pub_int_v #t (x: pub_int_t t) : range_t t = | false | null | false | match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.pub_int_t",
"FStar.UInt8.v",
"FStar.UInt16.v",
"FStar.UInt32.v",
"FStar.UInt64.v",
"FStar.UInt128.v",
"FStar.Int8.v",
"FStar.Int16.v",
"FStar.Int32.v",
"FStar.Int64.v",
"FStar.Int128.v",
"Lib.IntTypes.range_t"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances frollow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pub_int_v (#t: _) (x: pub_int_t t) : range_t t | [] | Lib.IntTypes.pub_int_v | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Lib.IntTypes.pub_int_t t -> Lib.IntTypes.range_t t | {
"end_col": 22,
"end_line": 126,
"start_col": 2,
"start_line": 115
} |
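A minimal usage sketch for the `pub_int_v` projection defined in the row above (not taken from the dataset: the module name `PubIntExample`, the constant `forty_two`, and the literal 42 are illustrative assumptions). On a PUB-labelled value, `v` dispatches to `pub_int_v`, and the `v_mk_int` lemma declared in this interface recovers the mathematical value of a literal.

module PubIntExample
open Lib.IntTypes

(* A public byte built with the `byte` helper; `v` on it is `pub_int_v`. *)
let forty_two : byte_t = byte 42

(* `v_mk_int` fires on `v (mk_int 42)`, to which `byte 42` unfolds. *)
let _ = assert (v (byte 42) = 42)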
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t) | let op_At_Percent_Dot x t = | false | null | false | if unsigned t then x % modulus t else let open FStar.Int in x @% modulus t | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Prims.int",
"Lib.IntTypes.inttype",
"Lib.IntTypes.unsigned",
"Prims.op_Modulus",
"Lib.IntTypes.modulus",
"Prims.bool",
"FStar.Int.op_At_Percent"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction | false | true | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_At_Percent_Dot : x: Prims.int -> t: Lib.IntTypes.inttype -> Prims.int | [] | Lib.IntTypes.op_At_Percent_Dot | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.int -> t: Lib.IntTypes.inttype -> Prims.int | {
"end_col": 33,
"end_line": 338,
"start_col": 2,
"start_line": 337
} |
|
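A small worked example of the wrap-around operator `@%.` defined in the row above. This is a sketch only: the module name `WrapExample`, the binding names, and the concrete literals are my own, and the equalities are expected to be discharged by normalization rather than being part of the dataset. An unsigned target type reduces the value modulo 2^bits; a signed target wraps it into two's-complement range.

module WrapExample
open Lib.IntTypes

(* 300 does not fit in U8, so it wraps to 300 % 256 = 44. *)
let wrap_u8 = assert_norm ((300 @%. U8) = 44)

(* 200 does not fit in S8, so it wraps to 200 - 256 = -56. *)
let wrap_s8 = assert_norm ((200 @%. S8) = (-56))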
Prims.Tot | val logor_v (#t: inttype) (a b: range_t t) : range_t t | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b | val logor_v (#t: inttype) (a b: range_t t) : range_t t
let logor_v (#t: inttype) (a b: range_t t) : range_t t = | false | null | false | match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.range_t",
"FStar.Int.logor",
"Lib.IntTypes.bits",
"FStar.UInt.logor"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b)) | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val logor_v (#t: inttype) (a b: range_t t) : range_t t | [] | Lib.IntTypes.logor_v | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Lib.IntTypes.range_t t -> b: Lib.IntTypes.range_t t -> Lib.IntTypes.range_t t | {
"end_col": 33,
"end_line": 628,
"start_col": 2,
"start_line": 626
} |
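A sketch of the idiom that `logor_v` specifies: ORing bit-disjoint values behaves like addition, which `logor_disjoint` (declared earlier in this interface) makes available to the prover. The module name `PackExample`, the helper `pack`, and its nibble-sized fields are hypothetical, not part of the library.

module PackExample
open Lib.IntTypes

(* Hypothetical helper: `hi` carries the high nibble, `lo` the low nibble. *)
let pack (hi:uint8{v hi % pow2 4 = 0}) (lo:uint8{v lo < pow2 4}) : uint8 =
  lo `logor` hi

(* Since the operands occupy disjoint bits, the OR reads back as a sum. *)
let pack_spec (hi:uint8{v hi % pow2 4 = 0}) (lo:uint8{v lo < pow2 4})
  : Lemma (v (pack hi lo) == v lo + v hi)
  = logor_disjoint lo hi 4

Stating the refinements in terms of `pow2 4` rather than the literal 16 keeps the side conditions syntactically aligned with the precondition of `logor_disjoint`.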
Prims.Tot | val logand_v (#t: inttype) (a b: range_t t) : range_t t | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b | val logand_v (#t: inttype) (a b: range_t t) : range_t t
let logand_v (#t: inttype) (a b: range_t t) : range_t t = | false | null | false | match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.range_t",
"FStar.Int.logand",
"Lib.IntTypes.bits",
"FStar.UInt.logand"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b)) | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val logand_v (#t: inttype) (a b: range_t t) : range_t t | [] | Lib.IntTypes.logand_v | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Lib.IntTypes.range_t t -> b: Lib.IntTypes.range_t t -> Lib.IntTypes.range_t t | {
"end_col": 34,
"end_line": 578,
"start_col": 2,
"start_line": 576
} |
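A sketch of the masking idiom that `logand_v` specifies, using `logand` together with `logand_mask` (both declared earlier in this interface). The module name `MaskExample` and the helper names are hypothetical: ANDing with 2^m - 1 reads back as reduction modulo 2^m.

module MaskExample
open Lib.IntTypes

(* Hypothetical helper: keep the low nibble of a secret byte. *)
let low_nibble (x:uint8) : uint8 = x `logand` (u8 0xf)

(* `logand_mask` with m = 4 gives the arithmetic reading of the mask. *)
let low_nibble_spec (x:uint8) : Lemma (v (low_nibble x) == v x % pow2 4)
  = logand_mask x (u8 0xf) 4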
Prims.Tot | val lognot_v (#t: inttype) (a: range_t t) : range_t t | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lognot_v (#t:inttype) (a:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a | val lognot_v (#t: inttype) (a: range_t t) : range_t t
let lognot_v (#t: inttype) (a: range_t t) : range_t t = | false | null | false | match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.range_t",
"FStar.Int.lognot",
"Lib.IntTypes.bits",
"FStar.UInt.lognot"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b))
let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b
val logor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logor` b) == v a `logor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val lognot: #t:inttype -> #l:secrecy_level -> int_t t l -> int_t t l
val lognot_lemma: #t: inttype -> #l: secrecy_level ->
a: int_t t l ->
Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (lognot a) == 0 else v (lognot a) == ones_v t)) | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lognot_v (#t: inttype) (a: range_t t) : range_t t | [] | Lib.IntTypes.lognot_v | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Lib.IntTypes.range_t t -> Lib.IntTypes.range_t t | {
"end_col": 32,
"end_line": 649,
"start_col": 2,
"start_line": 647
} |
Prims.Tot | val rotate_left_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rotate_left_i (#t:inttype) (#l:secrecy_level) (s:rotval t{unsigned t}) (u:uint_t t l) : uint_t t l = rotate_left u s | val rotate_left_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l
let rotate_left_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l = | false | null | false | rotate_left u s | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.secrecy_level",
"Lib.IntTypes.rotval",
"Prims.b2t",
"Lib.IntTypes.unsigned",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.rotate_left"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
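(* Editor's note -- illustrative sketch only, not part of the original
   interface; the helper name `low_byte` is hypothetical. Given the `cast`
   declaration above, a wider secret integer can be truncated to a narrower
   unsigned one, e.g. keeping the low byte of a secret 32-bit value:
     let low_byte (x:uint32) : uint8 = cast U8 SEC x
   Here `v (low_byte x) == v x % pow2 8`, since casting to an unsigned type
   reduces modulo its modulus (the `@%.` operator above). *)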
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b))
let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b
val logor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logor` b) == v a `logor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val lognot: #t:inttype -> #l:secrecy_level -> int_t t l -> int_t t l
val lognot_lemma: #t: inttype -> #l: secrecy_level ->
a: int_t t l ->
Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (lognot a) == 0 else v (lognot a) == ones_v t))
let lognot_v (#t:inttype) (a:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a
val lognot_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> Lemma (v (lognot a) == lognot_v (v a))
inline_for_extraction
type shiftval (t:inttype) = u:size_t{v u < bits t}
inline_for_extraction
type rotval (t:inttype) = u:size_t{0 < v u /\ v u < bits t}
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_right: #t:inttype -> #l:secrecy_level
-> int_t t l
-> shiftval t
-> int_t t l
val shift_right_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:shiftval t
-> Lemma
(v (shift_right a b) == v a / pow2 (v b))
[SMTPat (v #t #l (shift_right #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> s:shiftval t
-> Pure (int_t t l)
(requires unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t))
(ensures fun _ -> True)
val shift_left_lemma:
#t:inttype
-> #l:secrecy_level
-> a:int_t t l{unsigned t \/ 0 <= v a}
-> s:shiftval t{unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t)}
-> Lemma
(v (shift_left a s) == (v a * pow2 (v s)) @%. t)
[SMTPat (v #t #l (shift_left #t #l a s))]
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_right: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
inline_for_extraction
let shift_right_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_right u s
inline_for_extraction
let shift_left_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_left u s
inline_for_extraction
let rotate_right_i (#t:inttype) (#l:secrecy_level) (s:rotval t{unsigned t}) (u:uint_t t l) : uint_t t l = rotate_right u s | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val rotate_left_i (#t: inttype) (#l: secrecy_level) (s: rotval t {unsigned t}) (u: uint_t t l)
: uint_t t l | [] | Lib.IntTypes.rotate_left_i | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Lib.IntTypes.rotval t {Lib.IntTypes.unsigned t} -> u133: Lib.IntTypes.uint_t t l
-> Lib.IntTypes.uint_t t l | {
"end_col": 120,
"end_line": 717,
"start_col": 105,
"start_line": 717
} |
Prims.Tot | val mod_mask (#t: inttype) (#l: secrecy_level) (m: shiftval t {pow2 (uint_v m) <= maxint t})
: int_t t l | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mod_mask (#t:inttype) (#l:secrecy_level) (m:shiftval t{pow2 (uint_v m) <= maxint t}) : int_t t l =
shift_left_lemma #t #l (mk_int 1) m;
(mk_int 1 `shift_left` m) `sub` mk_int 1 | val mod_mask (#t: inttype) (#l: secrecy_level) (m: shiftval t {pow2 (uint_v m) <= maxint t})
: int_t t l
let mod_mask (#t: inttype) (#l: secrecy_level) (m: shiftval t {pow2 (uint_v m) <= maxint t})
: int_t t l = | false | null | false | shift_left_lemma #t #l (mk_int 1) m;
((mk_int 1) `shift_left` m) `sub` (mk_int 1) | {
"checked_file": "Lib.IntTypes.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.UInt128.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int8.fsti.checked",
"FStar.Int64.fsti.checked",
"FStar.Int32.fsti.checked",
"FStar.Int16.fsti.checked",
"FStar.Int128.fsti.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "Lib.IntTypes.fsti"
} | [
"total"
] | [
"Lib.IntTypes.inttype",
"Lib.IntTypes.secrecy_level",
"Lib.IntTypes.shiftval",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.pow2",
"Lib.IntTypes.uint_v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Lib.IntTypes.maxint",
"Lib.IntTypes.sub",
"Lib.IntTypes.shift_left",
"Lib.IntTypes.mk_int",
"Prims.unit",
"Lib.IntTypes.shift_left_lemma",
"Lib.IntTypes.int_t"
] | [] | module Lib.IntTypes
open FStar.Mul
#push-options "--max_fuel 0 --max_ifuel 1 --z3rlimit 20"
// Other instances follow from `FStar.UInt.pow2_values` which is in
// scope of every module depending on Lib.IntTypes
val pow2_2: n:nat -> Lemma (pow2 2 = 4) [SMTPat (pow2 n)]
val pow2_3: n:nat -> Lemma (pow2 3 = 8) [SMTPat (pow2 n)]
val pow2_4: n:nat -> Lemma (pow2 4 = 16) [SMTPat (pow2 n)]
val pow2_127: n:nat -> Lemma (pow2 127 = 0x80000000000000000000000000000000) [SMTPat (pow2 n)]
///
/// Definition of machine integer base types
///
type inttype =
| U1 | U8 | U16 | U32 | U64 | U128 | S8 | S16 | S32 | S64 | S128
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let unsigned = function
| U1 | U8 | U16 | U32 | U64 | U128 -> true
| _ -> false
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let signed = function
| S8 | S16 | S32 | S64 | S128 -> true
| _ -> false
///
/// Operations on the underlying machine integer base types
///
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let numbytes = function
| U1 -> 1
| U8 -> 1
| S8 -> 1
| U16 -> 2
| S16 -> 2
| U32 -> 4
| S32 -> 4
| U64 -> 8
| S64 -> 8
| U128 -> 16
| S128 -> 16
[@(strict_on_arguments [0])]
unfold
inline_for_extraction
let bits = function
| U1 -> 1
| U8 -> 8
| S8 -> 8
| U16 -> 16
| S16 -> 16
| U32 -> 32
| S32 -> 32
| U64 -> 64
| S64 -> 64
| U128 -> 128
| S128 -> 128
val bits_numbytes: t:inttype{~(U1? t)} -> Lemma (bits t == 8 * numbytes t)
// [SMTPat [bits t; numbytes t]]
unfold
let modulus (t:inttype) = pow2 (bits t)
[@(strict_on_arguments [0])]
unfold
let maxint (t:inttype) =
if unsigned t then pow2 (bits t) - 1 else pow2 (bits t - 1) - 1
[@(strict_on_arguments [0])]
unfold
let minint (t:inttype) =
if unsigned t then 0 else -(pow2 (bits t - 1))
let range (n:int) (t:inttype) : Type0 =
minint t <= n /\ n <= maxint t
unfold
type range_t (t:inttype) = x:int{range x t}
///
/// PUBLIC Machine Integers
///
inline_for_extraction
let pub_int_t = function
| U1 -> n:UInt8.t{UInt8.v n < 2}
| U8 -> UInt8.t
| U16 -> UInt16.t
| U32 -> UInt32.t
| U64 -> UInt64.t
| U128 -> UInt128.t
| S8 -> Int8.t
| S16 -> Int16.t
| S32 -> Int32.t
| S64 -> Int64.t
| S128 -> Int128.t
[@(strict_on_arguments [0])]
unfold
let pub_int_v #t (x:pub_int_t t) : range_t t =
match t with
| U1 -> UInt8.v x
| U8 -> UInt8.v x
| U16 -> UInt16.v x
| U32 -> UInt32.v x
| U64 -> UInt64.v x
| U128 -> UInt128.v x
| S8 -> Int8.v x
| S16 -> Int16.v x
| S32 -> Int32.v x
| S64 -> Int64.v x
| S128 -> Int128.v x
///
/// SECRET Machine Integers
///
type secrecy_level =
| SEC
| PUB
inline_for_extraction
val sec_int_t: inttype -> Type0
val sec_int_v: #t:inttype -> sec_int_t t -> range_t t
///
/// GENERIC Machine Integers
///
inline_for_extraction
let int_t (t:inttype) (l:secrecy_level) =
match l with
| PUB -> pub_int_t t
| SEC -> sec_int_t t
[@(strict_on_arguments [1])]
let v #t #l (u:int_t t l) : range_t t =
match l with
| PUB -> pub_int_v #t u
| SEC -> sec_int_v #t u
unfold
let uint_t (t:inttype{unsigned t}) (l:secrecy_level) = int_t t l
unfold
let sint_t (t:inttype{signed t}) (l:secrecy_level) = int_t t l
unfold
let uint_v #t #l (u:uint_t t l) = v u
unfold
let sint_v #t #l (u:sint_t t l) = v u
unfold
type uint1 = uint_t U1 SEC
unfold
type uint8 = uint_t U8 SEC
unfold
type int8 = sint_t S8 SEC
unfold
type uint16 = uint_t U16 SEC
unfold
type int16 = sint_t S16 SEC
unfold
type uint32 = uint_t U32 SEC
unfold
type int32 = sint_t S32 SEC
unfold
type uint64 = uint_t U64 SEC
unfold
type int64 = sint_t S64 SEC
unfold
type uint128 = uint_t U128 SEC
unfold
type int128 = sint_t S128 SEC
unfold
type bit_t = uint_t U1 PUB
unfold
type byte_t = uint_t U8 PUB
unfold
type size_t = uint_t U32 PUB
// 2019.7.19: Used only by experimental Blake2b; remove?
unfold
type size128_t = uint_t U128 PUB
unfold
type pub_uint8 = uint_t U8 PUB
unfold
type pub_int8 = sint_t S8 PUB
unfold
type pub_uint16 = uint_t U16 PUB
unfold
type pub_int16 = sint_t S16 PUB
unfold
type pub_uint32 = uint_t U32 PUB
unfold
type pub_int32 = sint_t S32 PUB
unfold
type pub_uint64 = uint_t U64 PUB
unfold
type pub_int64 = sint_t S64 PUB
unfold
type pub_uint128 = uint_t U128 PUB
unfold
type pub_int128 = sint_t S128 PUB
///
/// Casts between mathematical and machine integers
///
inline_for_extraction
val secret: #t:inttype -> x:int_t t PUB -> y:int_t t SEC{v x == v y}
[@(strict_on_arguments [0])]
inline_for_extraction
val mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> u:int_t t l{v u == n}
unfold
let uint (#t:inttype{unsigned t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
unfold
let sint (#t:inttype{signed t}) (#l:secrecy_level) (n:range_t t) = mk_int #t #l n
val v_injective: #t:inttype -> #l:secrecy_level -> a:int_t t l -> Lemma
(mk_int (v #t #l a) == a)
[SMTPat (v #t #l a)]
val v_mk_int: #t:inttype -> #l:secrecy_level -> n:range_t t -> Lemma
(v #t #l (mk_int #t #l n) == n)
[SMTPat (v #t #l (mk_int #t #l n))]
unfold
let u1 (n:range_t U1) : u:uint1{v u == n} = uint #U1 #SEC n
unfold
let u8 (n:range_t U8) : u:uint8{v u == n} = uint #U8 #SEC n
unfold
let i8 (n:range_t S8) : u:int8{v u == n} = sint #S8 #SEC n
unfold
let u16 (n:range_t U16) : u:uint16{v u == n} = uint #U16 #SEC n
unfold
let i16 (n:range_t S16) : u:int16{v u == n} = sint #S16 #SEC n
unfold
let u32 (n:range_t U32) : u:uint32{v u == n} = uint #U32 #SEC n
unfold
let i32 (n:range_t S32) : u:int32{v u == n} = sint #S32 #SEC n
unfold
let u64 (n:range_t U64) : u:uint64{v u == n} = uint #U64 #SEC n
unfold
let i64 (n:range_t S64) : u:int64{v u == n} = sint #S64 #SEC n
(* We only support 64-bit literals, hence the unexpected upper limit *)
inline_for_extraction
val u128: n:range_t U64 -> u:uint128{v #U128 u == n}
inline_for_extraction
val i128 (n:range_t S64) : u:int128{v #S128 u == n}
unfold
let max_size_t = maxint U32
unfold
type size_nat = n:nat{n <= max_size_t}
unfold
type size_pos = n:pos{n <= max_size_t}
unfold
let size (n:size_nat) : size_t = uint #U32 #PUB n
unfold
let size_v (s:size_t) = v s
unfold
let byte (n:nat{n < 256}) : b:byte_t{v b == n} = uint #U8 #PUB n
unfold
let byte_v (s:byte_t) : n:size_nat{v s == n} = v s
inline_for_extraction
val size_to_uint32: s:size_t -> u:uint32{u == u32 (v s)}
inline_for_extraction
val size_to_uint64: s:size_t -> u:uint64{u == u64 (v s)}
inline_for_extraction
val byte_to_uint8: s:byte_t -> u:uint8{u == u8 (v s)}
[@(strict_on_arguments [0])]
inline_for_extraction
let op_At_Percent_Dot x t =
if unsigned t then x % modulus t
else FStar.Int.(x @% modulus t)
// Casting a value to a signed type is implementation-defined when the value can't
// be represented in the new type; e.g. (int8_t)128UL is implementation-defined
// We rule out this case in the type of `u1`
// See 6.3.1.3 in http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1548.pdf
[@(strict_on_arguments [0;2])]
inline_for_extraction
val cast: #t:inttype -> #l:secrecy_level
-> t':inttype
-> l':secrecy_level{PUB? l \/ SEC? l'}
-> u1:int_t t l{unsigned t' \/ range (v u1) t'}
-> u2:int_t t' l'{v u2 == v u1 @%. t'}
[@(strict_on_arguments [0])]
unfold
let to_u1 #t #l u : uint1 = cast #t #l U1 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u8 #t #l u : uint8 = cast #t #l U8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i8 #t #l u : int8 = cast #t #l S8 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u16 #t #l u : uint16 = cast #t #l U16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i16 #t #l u : int16 = cast #t #l S16 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u32 #t #l u : uint32 = cast #t #l U32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i32 #t #l u : int32 = cast #t #l S32 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u64 #t #l u : uint64 = cast #t #l U64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i64 #t #l u : int64 = cast #t #l S64 SEC u
[@(strict_on_arguments [0])]
unfold
let to_u128 #t #l u : uint128 = cast #t #l U128 SEC u
[@(strict_on_arguments [0])]
unfold
let to_i128 #t #l u : int128 = cast #t #l S128 SEC u
///
/// Bitwise operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
let ones_v (t:inttype) =
match t with
| U1 | U8 | U16 | U32 | U64 | U128 -> maxint t
| S8 | S16 | S32 | S64 | S128 -> -1
[@(strict_on_arguments [0])]
inline_for_extraction
val ones: t:inttype -> l:secrecy_level -> n:int_t t l{v n = ones_v t}
inline_for_extraction
val zeros: t:inttype -> l:secrecy_level -> n:int_t t l{v n = 0}
[@(strict_on_arguments [0])]
inline_for_extraction
val add_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val add_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(v (add_mod a b) == (v a + v b) @%. t)
[SMTPat (v #t #l (add_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val add: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> int_t t l
val add_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a + v b) t}
-> Lemma
(v #t #l (add #t #l a b) == v a + v b)
[SMTPat (v #t #l (add #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val incr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> int_t t l
val incr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{v a < maxint t}
-> Lemma (v (incr a) == v a + 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val mul_mod: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val mul_mod_lemma: #t:inttype{unsigned t /\ ~(U128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (mul_mod a b) == (v a * v b) @%. t)
[SMTPat (v #t #l (mul_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val mul: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> int_t t l
val mul_lemma: #t:inttype{~(U128? t) /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a * v b) t}
-> Lemma (v #t #l (mul #t #l a b) == v a * v b)
[SMTPat (v #t #l (mul #t #l a b))]
inline_for_extraction
val mul64_wide: uint64 -> uint64 -> uint128
val mul64_wide_lemma: a:uint64 -> b:uint64 -> Lemma
(v (mul64_wide a b) == v a * v b)
[SMTPat (v (mul64_wide a b))]
// KB: I'd prefer
// v (mul64_wide a b) = (pow2 (bits t) + v a - v b) % pow2 (bits t)
inline_for_extraction
val mul_s64_wide: int64 -> int64 -> int128
val mul_s64_wide_lemma: a:int64 -> b:int64 -> Lemma
(v (mul_s64_wide a b) == v a * v b)
[SMTPat (v (mul_s64_wide a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub_mod: #t:inttype{unsigned t} -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val sub_mod_lemma: #t:inttype{unsigned t} -> #l:secrecy_level -> a:int_t t l -> b:int_t t l
-> Lemma (v (sub_mod a b) == (v a - v b) @%. t)
[SMTPat (v #t #l (sub_mod #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val sub: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> int_t t l
val sub_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l{range (v a - v b) t}
-> Lemma (v (sub a b) == v a - v b)
[SMTPat (v #t #l (sub #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val decr: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> int_t t l
val decr_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> Lemma (v (decr a) == v a - 1)
[@(strict_on_arguments [0])]
inline_for_extraction
val logxor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logxor_lemma: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(a `logxor` (a `logxor` b) == b /\
a `logxor` (b `logxor` a) == b /\
a `logxor` (mk_int #t #l 0) == a)
val logxor_lemma1: #t:inttype -> #l:secrecy_level -> a:int_t t l -> b:int_t t l -> Lemma
(requires range (v a) U1 /\ range (v b) U1)
(ensures range (v (a `logxor` b)) U1)
let logxor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logxor #(bits t) a b
| _ -> UInt.logxor #(bits t) a b
val logxor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logxor` b) == v a `logxor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val logand: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logand_zeros: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` zeros t l) == 0)
val logand_ones: #t:inttype -> #l:secrecy_level -> a:int_t t l ->
Lemma (v (a `logand` ones t l) == v a)
// For backwards compatibility
val logand_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = 0 then v (a `logand` b) == 0 else v (a `logand` b) == v b))
let logand_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logand #(bits t) a b
| _ -> UInt.logand #(bits t) a b
val logand_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logand` b) == v a `logand_v` v b)
//[SMTPat (v (a `logand` b))]
val logand_le:#t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l ->
Lemma (requires True)
(ensures v (logand a b) <= v a /\ v (logand a b) <= v b)
val logand_mask: #t:inttype{unsigned t} -> #l:secrecy_level -> a:uint_t t l -> b:uint_t t l -> m:pos{m < bits t} ->
Lemma
(requires v b == pow2 m - 1)
(ensures v (logand #t #l a b) == v a % pow2 m)
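(* Editor's note -- worked example, not part of the original file. With
   a = u32 0x1234 and b = u32 0xff (so v b == pow2 8 - 1, i.e. m = 8),
   `logand_mask` gives v (a `logand` b) == 0x1234 % pow2 8 == 0x34:
   the mask keeps exactly the low 8 bits. *)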
[@(strict_on_arguments [0])]
inline_for_extraction
val logor: #t:inttype -> #l:secrecy_level
-> int_t t l
-> int_t t l
-> int_t t l
val logor_disjoint: #t:inttype{unsigned t} -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> m:nat{m < bits t}
-> Lemma
(requires 0 <= v a /\ v a < pow2 m /\ v b % pow2 m == 0)
(ensures v (a `logor` b) == v a + v b)
//[SMTPat (v (a `logor` b))]
val logor_zeros: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` zeros t l) == v a)
val logor_ones: #t: inttype -> #l: secrecy_level -> a: int_t t l ->
Lemma (v (a `logor` ones t l) == ones_v t)
// For backwards compatibility
val logor_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (a `logor` b) == ones_v t else v (a `logor` b) == v b))
let logor_v (#t:inttype) (a:range_t t) (b:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.logor #(bits t) a b
| _ -> UInt.logor #(bits t) a b
val logor_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:int_t t l
-> Lemma (v (a `logor` b) == v a `logor_v` v b)
[@(strict_on_arguments [0])]
inline_for_extraction
val lognot: #t:inttype -> #l:secrecy_level -> int_t t l -> int_t t l
val lognot_lemma: #t: inttype -> #l: secrecy_level ->
a: int_t t l ->
Lemma
(requires v a = 0 \/ v a = ones_v t)
(ensures (if v a = ones_v t then v (lognot a) == 0 else v (lognot a) == ones_v t))
let lognot_v (#t:inttype) (a:range_t t) : range_t t =
match t with
| S8 | S16 | S32 | S64 | S128 -> Int.lognot #(bits t) a
| _ -> UInt.lognot #(bits t) a
val lognot_spec: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> Lemma (v (lognot a) == lognot_v (v a))
inline_for_extraction
type shiftval (t:inttype) = u:size_t{v u < bits t}
inline_for_extraction
type rotval (t:inttype) = u:size_t{0 < v u /\ v u < bits t}
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_right: #t:inttype -> #l:secrecy_level
-> int_t t l
-> shiftval t
-> int_t t l
val shift_right_lemma: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> b:shiftval t
-> Lemma
(v (shift_right a b) == v a / pow2 (v b))
[SMTPat (v #t #l (shift_right #t #l a b))]
[@(strict_on_arguments [0])]
inline_for_extraction
val shift_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l
-> s:shiftval t
-> Pure (int_t t l)
(requires unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t))
(ensures fun _ -> True)
val shift_left_lemma:
#t:inttype
-> #l:secrecy_level
-> a:int_t t l{unsigned t \/ 0 <= v a}
-> s:shiftval t{unsigned t \/ (0 <= v a /\ v a * pow2 (v s) <= maxint t)}
-> Lemma
(v (shift_left a s) == (v a * pow2 (v s)) @%. t)
[SMTPat (v #t #l (shift_left #t #l a s))]
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_right: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
[@(strict_on_arguments [0])]
inline_for_extraction
val rotate_left: #t:inttype -> #l:secrecy_level
-> a:int_t t l{unsigned t}
-> rotval t
-> int_t t l
inline_for_extraction
let shift_right_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_right u s
inline_for_extraction
let shift_left_i (#t:inttype) (#l:secrecy_level) (s:shiftval t{unsigned t}) (u:uint_t t l) : uint_t t l = shift_left u s
inline_for_extraction
let rotate_right_i (#t:inttype) (#l:secrecy_level) (s:rotval t{unsigned t}) (u:uint_t t l) : uint_t t l = rotate_right u s
inline_for_extraction
let rotate_left_i (#t:inttype) (#l:secrecy_level) (s:rotval t{unsigned t}) (u:uint_t t l) : uint_t t l = rotate_left u s
[@(strict_on_arguments [0])]
inline_for_extraction
val ct_abs: #t:inttype{signed t /\ ~(S128? t)} -> #l:secrecy_level
-> a:int_t t l{minint t < v a}
-> b:int_t t l{v b == abs (v a)}
///
/// Masking operators for all machine integers
///
[@(strict_on_arguments [0])]
inline_for_extraction
val eq_mask: #t:inttype{~(S128? t)} -> int_t t SEC -> int_t t SEC -> int_t t SEC
val eq_mask_lemma: #t:inttype{~(S128? t)} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a = v b then v (eq_mask a b) == ones_v t
else v (eq_mask a b) == 0)
[SMTPat (eq_mask #t a b)]
val eq_mask_logand_lemma:
#t:inttype{~(S128? t)}
-> a:int_t t SEC
-> b:int_t t SEC
-> c:int_t t SEC -> Lemma
(if v a = v b then v (c `logand` eq_mask a b) == v c
else v (c `logand` eq_mask a b) == 0)
[SMTPat (c `logand` eq_mask a b)]
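(* Editor's note -- hypothetical helper, not in the original file: a
   constant-time select can be built from the masks declared here, e.g.
     let select (a b x y:uint32) : uint32 =
       let m = eq_mask a b in
       (x `logand` m) `logor` (y `logand` (lognot m))
   By eq_mask_lemma, m is all-ones when v a = v b and zero otherwise, so the
   result has the value of x in the first case and of y in the second
   (following the logand/logor/lognot lemmas above), without branching on
   secret data. *)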
[@(strict_on_arguments [0])]
inline_for_extraction
val neq_mask: #t:inttype{~(S128? t)} -> a:int_t t SEC -> b:int_t t SEC -> int_t t SEC
val neq_mask_lemma: #t:inttype{~(S128? t)} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a = v b then v (neq_mask a b) == 0
else v (neq_mask a b) == ones_v t)
[SMTPat (neq_mask #t a b)]
[@(strict_on_arguments [0])]
inline_for_extraction
val gte_mask: #t:inttype{unsigned t} -> int_t t SEC -> b:int_t t SEC -> int_t t SEC
val gte_mask_lemma: #t:inttype{unsigned t} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a >= v b then v (gte_mask a b) == ones_v t
else v (gte_mask a b) == 0)
[SMTPat (gte_mask #t a b)]
val gte_mask_logand_lemma: #t:inttype{unsigned t}
-> a:int_t t SEC
-> b:int_t t SEC
-> c:int_t t SEC
-> Lemma
(if v a >= v b then v (c `logand` gte_mask a b) == v c
else v (c `logand` gte_mask a b) == 0)
[SMTPat (c `logand` gte_mask a b)]
[@(strict_on_arguments [0])]
inline_for_extraction
val lt_mask: #t:inttype{unsigned t} -> int_t t SEC -> int_t t SEC -> int_t t SEC
val lt_mask_lemma: #t:inttype{unsigned t} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a < v b then v (lt_mask a b) == ones_v t
else v (lt_mask a b) == 0)
[SMTPat (lt_mask #t a b)]
[@(strict_on_arguments [0])]
inline_for_extraction
val gt_mask: #t:inttype{unsigned t} -> int_t t SEC -> b:int_t t SEC -> int_t t SEC
val gt_mask_lemma: #t:inttype{unsigned t} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a > v b then v (gt_mask a b) == ones_v t
else v (gt_mask a b) == 0)
[SMTPat (gt_mask #t a b)]
[@(strict_on_arguments [0])]
inline_for_extraction
val lte_mask: #t:inttype{unsigned t} -> int_t t SEC -> int_t t SEC -> int_t t SEC
val lte_mask_lemma: #t:inttype{unsigned t} -> a:int_t t SEC -> b:int_t t SEC -> Lemma
(if v a <= v b then v (lte_mask a b) == ones_v t
else v (lte_mask a b) == 0)
[SMTPat (lte_mask #t a b)]
#push-options "--max_fuel 1"
[@(strict_on_arguments [0])]
inline_for_extraction | false | false | Lib.IntTypes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mod_mask (#t: inttype) (#l: secrecy_level) (m: shiftval t {pow2 (uint_v m) <= maxint t})
: int_t t l | [] | Lib.IntTypes.mod_mask | {
"file_name": "lib/Lib.IntTypes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Lib.IntTypes.shiftval t {Prims.pow2 (Lib.IntTypes.uint_v m) <= Lib.IntTypes.maxint t}
-> Lib.IntTypes.int_t t l | {
"end_col": 42,
"end_line": 808,
"start_col": 2,
"start_line": 807
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Spec.Agile.AEAD",
"short_module": "AEAD"
},
{
"abbrev": true,
"full_module": "Spec.Agile.HPKE",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let iv (a:AEAD.alg) = lbuffer uint8 12ul | let iv (a: AEAD.alg) = | false | null | false | lbuffer uint8 12ul | {
"checked_file": "Hacl.HPKE.Interface.AEAD.fsti.checked",
"dependencies": [
"Spec.Agile.HPKE.fsti.checked",
"Spec.Agile.AEAD.fsti.checked",
"prims.fst.checked",
"Meta.Attribute.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.HPKE.Interface.AEAD.fsti"
} | [
"total"
] | [
"Spec.Agile.AEAD.alg",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t"
] | [] | module Hacl.HPKE.Interface.AEAD
open FStar.HyperStack
open FStar.HyperStack.All
open Lib.IntTypes
open Lib.Buffer
module S = Spec.Agile.HPKE
module AEAD = Spec.Agile.AEAD
inline_for_extraction noextract
let kv (a:AEAD.alg) = lbuffer uint8 (size (AEAD.key_length a)) | false | true | Hacl.HPKE.Interface.AEAD.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val iv : a: Spec.Agile.AEAD.alg -> Type0 | [] | Hacl.HPKE.Interface.AEAD.iv | {
"file_name": "code/hpke/Hacl.HPKE.Interface.AEAD.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Spec.Agile.AEAD.alg -> Type0 | {
"end_col": 40,
"end_line": 15,
"start_col": 22,
"start_line": 15
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Spec.Agile.AEAD",
"short_module": "AEAD"
},
{
"abbrev": true,
"full_module": "Spec.Agile.HPKE",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tag (a:AEAD.alg) = lbuffer uint8 (size (AEAD.tag_length a)) | let tag (a: AEAD.alg) = | false | null | false | lbuffer uint8 (size (AEAD.tag_length a)) | {
"checked_file": "Hacl.HPKE.Interface.AEAD.fsti.checked",
"dependencies": [
"Spec.Agile.HPKE.fsti.checked",
"Spec.Agile.AEAD.fsti.checked",
"prims.fst.checked",
"Meta.Attribute.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.HPKE.Interface.AEAD.fsti"
} | [
"total"
] | [
"Spec.Agile.AEAD.alg",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"Lib.IntTypes.size",
"Spec.Agile.AEAD.tag_length"
] | [] | module Hacl.HPKE.Interface.AEAD
open FStar.HyperStack
open FStar.HyperStack.All
open Lib.IntTypes
open Lib.Buffer
module S = Spec.Agile.HPKE
module AEAD = Spec.Agile.AEAD
inline_for_extraction noextract
let kv (a:AEAD.alg) = lbuffer uint8 (size (AEAD.key_length a))
inline_for_extraction noextract
let iv (a:AEAD.alg) = lbuffer uint8 12ul | false | true | Hacl.HPKE.Interface.AEAD.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tag : a: Spec.Agile.AEAD.alg -> Type0 | [] | Hacl.HPKE.Interface.AEAD.tag | {
"file_name": "code/hpke/Hacl.HPKE.Interface.AEAD.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Spec.Agile.AEAD.alg -> Type0 | {
"end_col": 63,
"end_line": 17,
"start_col": 23,
"start_line": 17
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Spec.Agile.AEAD",
"short_module": "AEAD"
},
{
"abbrev": true,
"full_module": "Spec.Agile.HPKE",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let kv (a:AEAD.alg) = lbuffer uint8 (size (AEAD.key_length a)) | let kv (a: AEAD.alg) = | false | null | false | lbuffer uint8 (size (AEAD.key_length a)) | {
"checked_file": "Hacl.HPKE.Interface.AEAD.fsti.checked",
"dependencies": [
"Spec.Agile.HPKE.fsti.checked",
"Spec.Agile.AEAD.fsti.checked",
"prims.fst.checked",
"Meta.Attribute.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.HPKE.Interface.AEAD.fsti"
} | [
"total"
] | [
"Spec.Agile.AEAD.alg",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"Lib.IntTypes.size",
"Spec.Agile.AEAD.key_length"
] | [] | module Hacl.HPKE.Interface.AEAD
open FStar.HyperStack
open FStar.HyperStack.All
open Lib.IntTypes
open Lib.Buffer
module S = Spec.Agile.HPKE
module AEAD = Spec.Agile.AEAD | false | true | Hacl.HPKE.Interface.AEAD.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val kv : a: Spec.Agile.AEAD.alg -> Type0 | [] | Hacl.HPKE.Interface.AEAD.kv | {
"file_name": "code/hpke/Hacl.HPKE.Interface.AEAD.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Spec.Agile.AEAD.alg -> Type0 | {
"end_col": 62,
"end_line": 13,
"start_col": 22,
"start_line": 13
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Spec.Agile.AEAD",
"short_module": "AEAD"
},
{
"abbrev": true,
"full_module": "Spec.Agile.HPKE",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aead_encrypt_st (a:S.aead) =
_:squash (S.Seal? a /\ S.is_valid_aead a)
-> key:kv (S.Seal?.alg a)
-> nonce:iv (S.Seal?.alg a)
-> alen:size_t{v alen <= AEAD.max_length (S.Seal?.alg a)}
-> aad:lbuffer uint8 alen
-> len:size_t{v len + 16 <= max_size_t}
-> input:lbuffer uint8 len
-> output:lbuffer uint8 (size (v len + 16)) ->
Stack unit
(requires fun h ->
live h key /\ live h nonce /\ live h aad /\
live h input /\ live h output /\
disjoint key output /\ disjoint nonce output /\
eq_or_disjoint input output /\ disjoint aad output)
(ensures fun h0 _ h1 -> modifies (loc output) h0 h1 /\
(as_seq h1 output) `Seq.equal`
AEAD.encrypt #(S.Seal?.alg a) (as_seq h0 key) (as_seq h0 nonce) (as_seq h0 aad) (as_seq h0 input)) | let aead_encrypt_st (a: S.aead) = | false | null | false |
_: squash (S.Seal? a /\ S.is_valid_aead a) ->
key: kv (S.Seal?.alg a) ->
nonce: iv (S.Seal?.alg a) ->
alen: size_t{v alen <= AEAD.max_length (S.Seal?.alg a)} ->
aad: lbuffer uint8 alen ->
len: size_t{v len + 16 <= max_size_t} ->
input: lbuffer uint8 len ->
output: lbuffer uint8 (size (v len + 16))
-> Stack unit
(requires
fun h ->
live h key /\ live h nonce /\ live h aad /\ live h input /\ live h output /\
disjoint key output /\ disjoint nonce output /\ eq_or_disjoint input output /\
disjoint aad output)
(ensures
fun h0 _ h1 ->
modifies (loc output) h0 h1 /\
(as_seq h1 output)
`Seq.equal`
(AEAD.encrypt #(S.Seal?.alg a)
(as_seq h0 key)
(as_seq h0 nonce)
(as_seq h0 aad)
(as_seq h0 input))) | {
"checked_file": "Hacl.HPKE.Interface.AEAD.fsti.checked",
"dependencies": [
"Spec.Agile.HPKE.fsti.checked",
"Spec.Agile.AEAD.fsti.checked",
"prims.fst.checked",
"Meta.Attribute.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.HPKE.Interface.AEAD.fsti"
} | [
"total"
] | [
"Spec.Agile.HPKE.aead",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Spec.Agile.HPKE.uu___is_Seal",
"Spec.Agile.HPKE.is_valid_aead",
"Hacl.HPKE.Interface.AEAD.kv",
"Spec.Agile.HPKE.__proj__Seal__item__alg",
"Hacl.HPKE.Interface.AEAD.iv",
"Lib.IntTypes.size_t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Spec.Agile.AEAD.max_length",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"Prims.op_Addition",
"Lib.IntTypes.max_size_t",
"Lib.IntTypes.size",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Lib.Buffer.live",
"Lib.Buffer.MUT",
"Lib.Buffer.disjoint",
"Lib.Buffer.eq_or_disjoint",
"Lib.Buffer.modifies",
"Lib.Buffer.loc",
"FStar.Seq.Base.equal",
"Lib.Buffer.as_seq",
"Spec.Agile.AEAD.encrypt",
"Spec.Agile.AEAD.key_length",
"FStar.UInt32.__uint_to_t"
] | [] | module Hacl.HPKE.Interface.AEAD
open FStar.HyperStack
open FStar.HyperStack.All
open Lib.IntTypes
open Lib.Buffer
module S = Spec.Agile.HPKE
module AEAD = Spec.Agile.AEAD
inline_for_extraction noextract
let kv (a:AEAD.alg) = lbuffer uint8 (size (AEAD.key_length a))
inline_for_extraction noextract
let iv (a:AEAD.alg) = lbuffer uint8 12ul
inline_for_extraction noextract
let tag (a:AEAD.alg) = lbuffer uint8 (size (AEAD.tag_length a))
inline_for_extraction noextract | false | true | Hacl.HPKE.Interface.AEAD.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aead_encrypt_st : a: Spec.Agile.HPKE.aead -> Type0 | [] | Hacl.HPKE.Interface.AEAD.aead_encrypt_st | {
"file_name": "code/hpke/Hacl.HPKE.Interface.AEAD.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Spec.Agile.HPKE.aead -> Type0 | {
"end_col": 102,
"end_line": 37,
"start_col": 5,
"start_line": 21
} |
|
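Editorial note on the aead_encrypt_st record above: the output buffer length size (v len + 16) reserves room for the ciphertext plus a 16-byte authentication tag (the tag length of every AEAD algorithm HPKE admits), and the ensures clause pins the buffer contents to the pure specification AEAD.encrypt. A minimal sketch of what an instance of this type would look like, kept inside an F* comment because the value name and algorithm choice are illustrative assumptions rather than part of this record:

(* hypothetical instantiation sketch, not taken from this file:
   val my_aead_encrypt : aead_encrypt_st (S.Seal AEAD.CHACHA20_POLY1305)
   -- any implementation with this type must write ciphertext followed by the
      16-byte tag into `output`, matching AEAD.encrypt on the spec side. *)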
Prims.Tot | [
{
"abbrev": true,
"full_module": "Spec.Agile.AEAD",
"short_module": "AEAD"
},
{
"abbrev": true,
"full_module": "Spec.Agile.HPKE",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.HPKE.Interface",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aead_decrypt_st (a:S.aead) =
_:squash (S.Seal? a /\ S.is_valid_aead a)
-> key:kv (S.Seal?.alg a)
-> nonce:iv (S.Seal?.alg a)
-> alen:size_t{v alen <= AEAD.max_length (S.Seal?.alg a)}
-> aad:lbuffer uint8 alen
-> len:size_t{v len <= AEAD.max_length (S.Seal?.alg a) /\ v len + 16 <= max_size_t}
-> input:lbuffer uint8 len
-> output:lbuffer uint8 (size (v len + 16)) ->
Stack UInt32.t
(requires fun h ->
live h key /\ live h nonce /\ live h aad /\
live h input /\ live h output /\
eq_or_disjoint input output)
(ensures fun h0 z h1 -> modifies1 input h0 h1 /\
(let plain = AEAD.decrypt #(S.Seal?.alg a) (as_seq h0 key) (as_seq h0 nonce) (as_seq h0 aad) (as_seq h0 output) in
match z with
| 0ul -> Some? plain /\ as_seq h1 input `Seq.equal` Some?.v plain // decryption succeeded
      | 1ul -> None? plain // decryption failed
      | _ -> false) // no other return code is admissible
) | let aead_decrypt_st (a: S.aead) = | false | null | false |
_: squash (S.Seal? a /\ S.is_valid_aead a) ->
key: kv (S.Seal?.alg a) ->
nonce: iv (S.Seal?.alg a) ->
alen: size_t{v alen <= AEAD.max_length (S.Seal?.alg a)} ->
aad: lbuffer uint8 alen ->
len: size_t{v len <= AEAD.max_length (S.Seal?.alg a) /\ v len + 16 <= max_size_t} ->
input: lbuffer uint8 len ->
output: lbuffer uint8 (size (v len + 16))
-> Stack UInt32.t
(requires
fun h ->
live h key /\ live h nonce /\ live h aad /\ live h input /\ live h output /\
eq_or_disjoint input output)
(ensures
fun h0 z h1 ->
modifies1 input h0 h1 /\
(let plain =
AEAD.decrypt #(S.Seal?.alg a)
(as_seq h0 key)
(as_seq h0 nonce)
(as_seq h0 aad)
(as_seq h0 output)
in
match z with
| 0ul -> Some? plain /\ (as_seq h1 input) `Seq.equal` (Some?.v plain)
| 1ul -> None? plain
| _ -> false)) | {
"checked_file": "Hacl.HPKE.Interface.AEAD.fsti.checked",
"dependencies": [
"Spec.Agile.HPKE.fsti.checked",
"Spec.Agile.AEAD.fsti.checked",
"prims.fst.checked",
"Meta.Attribute.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Buffer.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Hacl.HPKE.Interface.AEAD.fsti"
} | [
"total"
] | [
"Spec.Agile.HPKE.aead",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Spec.Agile.HPKE.uu___is_Seal",
"Spec.Agile.HPKE.is_valid_aead",
"Hacl.HPKE.Interface.AEAD.kv",
"Spec.Agile.HPKE.__proj__Seal__item__alg",
"Hacl.HPKE.Interface.AEAD.iv",
"Lib.IntTypes.size_t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Spec.Agile.AEAD.max_length",
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"Prims.op_Addition",
"Lib.IntTypes.max_size_t",
"Lib.IntTypes.size",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"Lib.Buffer.live",
"Lib.Buffer.MUT",
"Lib.Buffer.eq_or_disjoint",
"Lib.Buffer.modifies1",
"FStar.Pervasives.Native.uu___is_Some",
"Spec.Agile.AEAD.decrypted",
"Lib.Buffer.as_seq",
"FStar.Seq.Base.equal",
"FStar.Pervasives.Native.__proj__Some__item__v",
"FStar.Pervasives.Native.uu___is_None",
"Prims.logical",
"FStar.Pervasives.Native.option",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.IntTypes.mk_int",
"Spec.Agile.AEAD.decrypt",
"Spec.Agile.AEAD.key_length",
"FStar.UInt32.__uint_to_t"
] | [] | module Hacl.HPKE.Interface.AEAD
open FStar.HyperStack
open FStar.HyperStack.All
open Lib.IntTypes
open Lib.Buffer
module S = Spec.Agile.HPKE
module AEAD = Spec.Agile.AEAD
inline_for_extraction noextract
let kv (a:AEAD.alg) = lbuffer uint8 (size (AEAD.key_length a))
inline_for_extraction noextract
let iv (a:AEAD.alg) = lbuffer uint8 12ul
inline_for_extraction noextract
let tag (a:AEAD.alg) = lbuffer uint8 (size (AEAD.tag_length a))
inline_for_extraction noextract
let aead_encrypt_st (a:S.aead) =
_:squash (S.Seal? a /\ S.is_valid_aead a)
-> key:kv (S.Seal?.alg a)
-> nonce:iv (S.Seal?.alg a)
-> alen:size_t{v alen <= AEAD.max_length (S.Seal?.alg a)}
-> aad:lbuffer uint8 alen
-> len:size_t{v len + 16 <= max_size_t}
-> input:lbuffer uint8 len
-> output:lbuffer uint8 (size (v len + 16)) ->
Stack unit
(requires fun h ->
live h key /\ live h nonce /\ live h aad /\
live h input /\ live h output /\
disjoint key output /\ disjoint nonce output /\
eq_or_disjoint input output /\ disjoint aad output)
(ensures fun h0 _ h1 -> modifies (loc output) h0 h1 /\
(as_seq h1 output) `Seq.equal`
AEAD.encrypt #(S.Seal?.alg a) (as_seq h0 key) (as_seq h0 nonce) (as_seq h0 aad) (as_seq h0 input))
inline_for_extraction noextract | false | true | Hacl.HPKE.Interface.AEAD.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aead_decrypt_st : a: Spec.Agile.HPKE.aead -> Type0 | [] | Hacl.HPKE.Interface.AEAD.aead_decrypt_st | {
"file_name": "code/hpke/Hacl.HPKE.Interface.AEAD.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Spec.Agile.HPKE.aead -> Type0 | {
"end_col": 3,
"end_line": 60,
"start_col": 4,
"start_line": 41
} |
|
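Editorial note on the aead_decrypt_st record above: the parameter names invert the usual reading. As the ensures clause shows, output (length v len + 16) holds the ciphertext followed by the 16-byte tag and is only read (AEAD.decrypt is applied to as_seq h0 output), while input (length v len) is the destination buffer for the recovered plaintext (modifies1 input h0 h1, and on success as_seq h1 input equals Some?.v plain). The UInt32.t result is a two-value code: 0ul means authentication succeeded and the plaintext was written, 1ul means AEAD.decrypt returned None; no other code satisfies the postcondition. An illustrative caller sketch, given as an F* comment since the function and buffer names are hypothetical:

(* let rc = aead_decrypt () key nonce aadlen aad len plain_dst cipher_and_tag in
   if rc = 0ul then (* plain_dst now holds the decrypted plaintext *) ...
   else (* rc must be 1ul: authentication failed; plain_dst is unconstrained *) ... *)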
FStar.Pervasives.Lemma | val a_pow2_64_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_64 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 64)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64 | val a_pow2_64_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_64 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 64))
let a_pow2_64_lemma #t k a = | false | null | true | SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64 | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Spec.Exponentiation.concrete_ops",
"Lib.Exponentiation.exp_pow2_lemma",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__a_spec",
"Spec.Exponentiation.__proj__Mkconcrete_ops__item__to",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__comm_monoid",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__refl",
"Prims.unit",
"Spec.Exponentiation.exp_pow2_lemma"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val a_pow2_64_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_64 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 64)) | [] | Hacl.Spec.PrecompBaseTable256.a_pow2_64_lemma | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Spec.Exponentiation.concrete_ops t -> a: t
-> FStar.Pervasives.Lemma
(ensures
Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) (Hacl.Spec.PrecompBaseTable256.a_pow2_64 k a) ==
Lib.Exponentiation.Definition.pow (Mkto_comm_monoid?.comm_monoid (Mkconcrete_ops?.to k))
(Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) a)
(Prims.pow2 64)) | {
"end_col": 65,
"end_line": 107,
"start_col": 2,
"start_line": 106
} |
FStar.Pervasives.Lemma | val exp_pow2_rec_is_exp_pow2: #t:Type -> k:SE.concrete_ops t -> a:t -> b:nat ->
Lemma (exp_pow2_rec k a b == SE.exp_pow2 k a b) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end | val exp_pow2_rec_is_exp_pow2: #t:Type -> k:SE.concrete_ops t -> a:t -> b:nat ->
Lemma (exp_pow2_rec k a b == SE.exp_pow2 k a b)
let rec exp_pow2_rec_is_exp_pow2 #t k a b = | false | null | true | if b = 0
then Lib.LoopCombinators.eq_repeat0 k.sqr a
else
(Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1)) | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Spec.Exponentiation.concrete_ops",
"Prims.nat",
"Prims.op_Equality",
"Prims.int",
"Lib.LoopCombinators.eq_repeat0",
"Spec.Exponentiation.__proj__Mkconcrete_ops__item__sqr",
"Prims.bool",
"Hacl.Spec.PrecompBaseTable256.exp_pow2_rec_is_exp_pow2",
"Prims.op_Subtraction",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"Lib.LoopCombinators.repeat",
"Lib.LoopCombinators.unfold_repeat"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2" | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val exp_pow2_rec_is_exp_pow2: #t:Type -> k:SE.concrete_ops t -> a:t -> b:nat ->
Lemma (exp_pow2_rec k a b == SE.exp_pow2 k a b) | [
"recursion"
] | Hacl.Spec.PrecompBaseTable256.exp_pow2_rec_is_exp_pow2 | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Spec.Exponentiation.concrete_ops t -> a: t -> b: Prims.nat
-> FStar.Pervasives.Lemma
(ensures Hacl.Spec.PrecompBaseTable256.exp_pow2_rec k a b == Spec.Exponentiation.exp_pow2 k a b) | {
"end_col": 44,
"end_line": 101,
"start_col": 2,
"start_line": 97
} |
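Editorial note on the exp_pow2_rec_is_exp_pow2 record above: the equivalence is a plain induction on b using the two repeat-combinator laws cited in the proof, Loops.eq_repeat0 (repeat 0 f x == x) and Loops.unfold_repeat (repeat (i+1) f x == f (repeat i f x)). With f = k.sqr these reproduce exactly the recursive unfolding of exp_pow2_rec, so both definitions compute the same chain of squarings. As a recurrence, in LaTeX:
\[ r_0 = a, \qquad r_n = \mathrm{sqr}(r_{n-1}) \ (n>0), \qquad r_b = \mathrm{repeat}\ b\ \mathrm{sqr}\ a = \mathrm{exp\_pow2\_rec}\ k\ a\ b. \]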
FStar.Pervasives.Lemma | val lemma_mod_pow2_sub: x:nat -> a:nat -> b:nat ->
Lemma (x / pow2 a % pow2 b * pow2 a == x % pow2 (a + b) - x % pow2 a) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
} | val lemma_mod_pow2_sub: x:nat -> a:nat -> b:nat ->
Lemma (x / pow2 a % pow2 b * pow2 a == x % pow2 (a + b) - x % pow2 a)
let lemma_mod_pow2_sub x a b = | false | null | true | calc ( == ) {
(x / pow2 a % pow2 b) * pow2 a;
( == ) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
(x % pow2 (a + b) / pow2 a) * pow2 a;
( == ) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
( == ) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
} | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Prims.nat",
"FStar.Calc.calc_finish",
"Prims.int",
"Prims.eq2",
"FStar.Mul.op_Star",
"Prims.op_Modulus",
"Prims.op_Division",
"Prims.pow2",
"Prims.op_Subtraction",
"Prims.op_Addition",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"FStar.Math.Lemmas.pow2_modulo_division_lemma_1",
"Prims.squash",
"FStar.Math.Lemmas.euclidean_division_definition",
"FStar.Math.Lemmas.pow2_modulo_modulo_lemma_1"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0" | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_mod_pow2_sub: x:nat -> a:nat -> b:nat ->
Lemma (x / pow2 a % pow2 b * pow2 a == x % pow2 (a + b) - x % pow2 a) | [] | Hacl.Spec.PrecompBaseTable256.lemma_mod_pow2_sub | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.nat -> a: Prims.nat -> b: Prims.nat
-> FStar.Pervasives.Lemma
(ensures
(x / Prims.pow2 a % Prims.pow2 b) * Prims.pow2 a == x % Prims.pow2 (a + b) - x % Prims.pow2 a) | {
"end_col": 3,
"end_line": 23,
"start_col": 2,
"start_line": 15
} |
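Editorial note on the lemma_mod_pow2_sub record above: a quick numeric check of the identity (x / 2^a mod 2^b) * 2^a = x mod 2^{a+b} - x mod 2^a. Take x = 53, a = 2, b = 2:
\[ (\lfloor 53/2^{2}\rfloor \bmod 2^{2})\cdot 2^{2} = (13 \bmod 4)\cdot 4 = 4, \qquad 53 \bmod 2^{4} - 53 \bmod 2^{2} = 5 - 1 = 4. \]
Both sides agree; the general proof follows from the same three division/modulo lemmas invoked in the calc block.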
FStar.Pervasives.Lemma | val a_pow2_128_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_128 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 128)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let a_pow2_128_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_128 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
(==) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
(==) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
} | val a_pow2_128_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_128 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 128))
let a_pow2_128_lemma #t k a = | false | null | true | let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc ( == ) {
refl (a_pow2_128 k a);
( == ) { () }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
( == ) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
( == ) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
( == ) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
( == ) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
} | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Spec.Exponentiation.concrete_ops",
"FStar.Calc.calc_finish",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__a_spec",
"Spec.Exponentiation.__proj__Mkconcrete_ops__item__to",
"Prims.eq2",
"Hacl.Spec.PrecompBaseTable256.a_pow2_128",
"Lib.Exponentiation.Definition.pow",
"Prims.pow2",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Mul.op_Star",
"Hacl.Spec.PrecompBaseTable256.a_pow2_64",
"Spec.Exponentiation.exp_pow2",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Hacl.Spec.PrecompBaseTable256.a_pow2_64_lemma",
"Lib.Exponentiation.Definition.lemma_pow_mul",
"FStar.Math.Lemmas.pow2_plus",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.Definition.comm_monoid",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options
let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64 | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val a_pow2_128_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_128 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 128)) | [] | Hacl.Spec.PrecompBaseTable256.a_pow2_128_lemma | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Spec.Exponentiation.concrete_ops t -> a: t
-> FStar.Pervasives.Lemma
(ensures
Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) (Hacl.Spec.PrecompBaseTable256.a_pow2_128 k a) ==
Lib.Exponentiation.Definition.pow (Mkto_comm_monoid?.comm_monoid (Mkconcrete_ops?.to k))
(Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) a)
(Prims.pow2 128)) | {
"end_col": 3,
"end_line": 125,
"start_col": 29,
"start_line": 110
} |
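Editorial note on the a_pow2_128_lemma record above: the calc proof is the algebraic identity
\[ \big(\mathrm{refl}(a)^{2^{64}}\big)^{2^{64}} \;=\; \mathrm{refl}(a)^{2^{64}\cdot 2^{64}} \;=\; \mathrm{refl}(a)^{2^{64+64}} \;=\; \mathrm{refl}(a)^{2^{128}}, \]
where the first step is LE.lemma_pow_mul and the second is Math.Lemmas.pow2_plus 64 64. The a_pow2_192_lemma record that follows repeats the same pattern once more, with exponents 2^{128} \cdot 2^{64} = 2^{192}.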
FStar.Pervasives.Lemma | val a_pow2_192_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_192 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 192)) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let a_pow2_192_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_192 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
(==) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
(==) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | val a_pow2_192_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_192 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 192))
let a_pow2_192_lemma #t k a = | false | null | true | let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc ( == ) {
refl (a_pow2_192 k a);
( == ) { () }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
( == ) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
( == ) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
( == ) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
( == ) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Spec.Exponentiation.concrete_ops",
"FStar.Calc.calc_finish",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__a_spec",
"Spec.Exponentiation.__proj__Mkconcrete_ops__item__to",
"Prims.eq2",
"Hacl.Spec.PrecompBaseTable256.a_pow2_192",
"Lib.Exponentiation.Definition.pow",
"Prims.pow2",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Mul.op_Star",
"Hacl.Spec.PrecompBaseTable256.a_pow2_128",
"Spec.Exponentiation.exp_pow2",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Hacl.Spec.PrecompBaseTable256.a_pow2_64_lemma",
"Hacl.Spec.PrecompBaseTable256.a_pow2_128_lemma",
"Lib.Exponentiation.Definition.lemma_pow_mul",
"FStar.Math.Lemmas.pow2_plus",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__refl",
"Lib.Exponentiation.Definition.comm_monoid",
"Spec.Exponentiation.__proj__Mkto_comm_monoid__item__comm_monoid"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options
let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64
let a_pow2_128_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_128 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
(==) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
(==) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
} | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val a_pow2_192_lemma: #t:Type -> k:SE.concrete_ops t -> a:t ->
Lemma (k.SE.to.SE.refl (a_pow2_192 k a) ==
LE.pow k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) (pow2 192)) | [] | Hacl.Spec.PrecompBaseTable256.a_pow2_192_lemma | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Spec.Exponentiation.concrete_ops t -> a: t
-> FStar.Pervasives.Lemma
(ensures
Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) (Hacl.Spec.PrecompBaseTable256.a_pow2_192 k a) ==
Lib.Exponentiation.Definition.pow (Mkto_comm_monoid?.comm_monoid (Mkconcrete_ops?.to k))
(Mkto_comm_monoid?.refl (Mkconcrete_ops?.to k) a)
(Prims.pow2 192)) | {
"end_col": 3,
"end_line": 143,
"start_col": 29,
"start_line": 128
} |
FStar.Pervasives.Lemma | val lemma_decompose_nat256_as_four_u64_lbignum: x:BD.lbignum U64 4{BD.bn_v x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 (BD.bn_v x) in
BD.bn_v (LSeq.sub x 0 1) == x0 /\
BD.bn_v (LSeq.sub x 1 1) == x1 /\
BD.bn_v (LSeq.sub x 2 1) == x2 /\
BD.bn_v (LSeq.sub x 3 1) == x3) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_decompose_nat256_as_four_u64_lbignum x =
let open Lib.Sequence in
let bn_x0 = LSeq.sub x 0 1 in
let bn_x1 = LSeq.sub x 1 1 in
let bn_x2 = LSeq.sub x 2 1 in
let bn_x3 = LSeq.sub x 3 1 in
assert_norm (pow2 0 = 1);
BD.bn_eval1 bn_x0;
BD.bn_eval_index x 0;
BD.bn_eval1 bn_x1;
BD.bn_eval_index x 1;
BD.bn_eval1 bn_x2;
BD.bn_eval_index x 2;
BD.bn_eval1 bn_x3;
BD.bn_eval_index x 3 | val lemma_decompose_nat256_as_four_u64_lbignum: x:BD.lbignum U64 4{BD.bn_v x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 (BD.bn_v x) in
BD.bn_v (LSeq.sub x 0 1) == x0 /\
BD.bn_v (LSeq.sub x 1 1) == x1 /\
BD.bn_v (LSeq.sub x 2 1) == x2 /\
BD.bn_v (LSeq.sub x 3 1) == x3)
let lemma_decompose_nat256_as_four_u64_lbignum x = | false | null | true | let open Lib.Sequence in
let bn_x0 = LSeq.sub x 0 1 in
let bn_x1 = LSeq.sub x 1 1 in
let bn_x2 = LSeq.sub x 2 1 in
let bn_x3 = LSeq.sub x 3 1 in
assert_norm (pow2 0 = 1);
BD.bn_eval1 bn_x0;
BD.bn_eval_index x 0;
BD.bn_eval1 bn_x1;
BD.bn_eval_index x 1;
BD.bn_eval1 bn_x2;
BD.bn_eval_index x 2;
BD.bn_eval1 bn_x3;
BD.bn_eval_index x 3 | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Hacl.Spec.Bignum.Definitions.lbignum",
"Lib.IntTypes.U64",
"Prims.b2t",
"Prims.op_LessThan",
"Hacl.Spec.Bignum.Definitions.bn_v",
"Prims.pow2",
"Hacl.Spec.Bignum.Definitions.bn_eval_index",
"Prims.unit",
"Hacl.Spec.Bignum.Definitions.bn_eval1",
"FStar.Pervasives.assert_norm",
"Prims.op_Equality",
"Prims.int",
"Lib.Sequence.lseq",
"Hacl.Spec.Bignum.Definitions.limb",
"Prims.l_and",
"Prims.eq2",
"FStar.Seq.Base.seq",
"Lib.Sequence.to_seq",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_or",
"FStar.Seq.Base.index",
"Lib.Sequence.index",
"Lib.Sequence.sub"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
}
let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
}
//-----------------------
#push-options "--fuel 2"
let rec exp_pow2_rec_is_exp_pow2 #t k a b =
if b = 0 then Lib.LoopCombinators.eq_repeat0 k.sqr a
else begin
Lib.LoopCombinators.unfold_repeat b k.sqr a (b - 1);
assert (Loops.repeat b k.sqr a == k.sqr (Loops.repeat (b - 1) k.sqr a));
exp_pow2_rec_is_exp_pow2 k a (b - 1) end
#pop-options
let a_pow2_64_lemma #t k a =
SE.exp_pow2_lemma k a 64;
LE.exp_pow2_lemma k.SE.to.SE.comm_monoid (k.SE.to.SE.refl a) 64
let a_pow2_128_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_128 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_64 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_64 k a) }
LE.pow cm (refl (a_pow2_64 k a)) (pow2 64);
(==) { a_pow2_64_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 64)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 64) (pow2 64) }
LE.pow cm (refl a) (pow2 64 * pow2 64);
(==) { Math.Lemmas.pow2_plus 64 64 }
LE.pow cm (refl a) (pow2 128);
}
let a_pow2_192_lemma #t k a =
let cm = k.SE.to.SE.comm_monoid in
let refl = k.SE.to.SE.refl in
calc (==) {
refl (a_pow2_192 k a);
(==) { }
refl (SE.exp_pow2 k (a_pow2_128 k a) 64);
(==) { a_pow2_64_lemma k (a_pow2_128 k a) }
LE.pow cm (refl (a_pow2_128 k a)) (pow2 64);
(==) { a_pow2_128_lemma k a }
LE.pow cm (LE.pow cm (refl a) (pow2 128)) (pow2 64);
(==) { LE.lemma_pow_mul cm (refl a) (pow2 128) (pow2 64) }
LE.pow cm (refl a) (pow2 128 * pow2 64);
(==) { Math.Lemmas.pow2_plus 128 64 }
LE.pow cm (refl a) (pow2 192);
} | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_decompose_nat256_as_four_u64_lbignum: x:BD.lbignum U64 4{BD.bn_v x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 (BD.bn_v x) in
BD.bn_v (LSeq.sub x 0 1) == x0 /\
BD.bn_v (LSeq.sub x 1 1) == x1 /\
BD.bn_v (LSeq.sub x 2 1) == x2 /\
BD.bn_v (LSeq.sub x 3 1) == x3) | [] | Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64_lbignum | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
x:
Hacl.Spec.Bignum.Definitions.lbignum Lib.IntTypes.U64 4
{Hacl.Spec.Bignum.Definitions.bn_v x < Prims.pow2 256}
-> FStar.Pervasives.Lemma
(ensures
(let _ =
Hacl.Spec.PrecompBaseTable256.decompose_nat256_as_four_u64 (Hacl.Spec.Bignum.Definitions.bn_v
x)
in
(let FStar.Pervasives.Native.Mktuple4 #_ #_ #_ #_ x0 x1 x2 x3 = _ in
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 0 1) == x0 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 1 1) == x1 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 2 1) == x2 /\
Hacl.Spec.Bignum.Definitions.bn_v (Lib.Sequence.sub x 3 1) == x3)
<:
Type0)) | {
"end_col": 22,
"end_line": 163,
"start_col": 2,
"start_line": 147
} |
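Editorial note on the lemma_decompose_nat256_as_four_u64_lbignum record above: for a 4-limb U64 bignum the evaluation function is
\[ \mathrm{bn\_v}(x) \;=\; \sum_{i=0}^{3} v(x[i])\cdot 2^{64 i}, \]
so BD.bn_eval_index gives v(x[i]) = \lfloor \mathrm{bn\_v}(x)/2^{64 i}\rfloor \bmod 2^{64}, and BD.bn_eval1 says a one-limb sub-sequence evaluates to that limb. Chaining the two, BD.bn_v (LSeq.sub x i 1) is exactly the i-th component returned by decompose_nat256_as_four_u64 (BD.bn_v x), which is what the lemma states limb by limb.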
FStar.Pervasives.Lemma | val lemma_point_mul_base_precomp4: #t:Type -> k:LE.comm_monoid t -> a:t -> b:nat{b < pow2 256} ->
Lemma (exp_as_exp_four_nat256_precomp k a b == LE.pow k a b) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_point_mul_base_precomp4 #t k a b =
let (b0, b1, b2, b3) = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc (==) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
(==) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul
(k.LE.mul
(k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64)))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul
(k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64))
(LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul
(k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k (LE.pow k a (pow2 192)) b3);
(==) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul
(LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128))
(LE.pow k a (b3 * pow2 192));
(==) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
(==) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
} | val lemma_point_mul_base_precomp4: #t:Type -> k:LE.comm_monoid t -> a:t -> b:nat{b < pow2 256} ->
Lemma (exp_as_exp_four_nat256_precomp k a b == LE.pow k a b)
let lemma_point_mul_base_precomp4 #t k a b = | false | null | true | let b0, b1, b2, b3 = decompose_nat256_as_four_u64 b in
let a_pow2_64 = LE.pow k a (pow2 64) in
let a_pow2_128 = LE.pow k a (pow2 128) in
let a_pow2_192 = LE.pow k a (pow2 192) in
let res = LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 in
calc ( == ) {
LE.exp_four_fw k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4;
( == ) { LE.exp_four_fw_lemma k a 64 b0 a_pow2_64 b1 a_pow2_128 b2 a_pow2_192 b3 4 }
k.LE.mul (k.LE.mul (k.LE.mul (LE.pow k a b0) (LE.pow k (LE.pow k a (pow2 64)) b1))
(LE.pow k a_pow2_128 b2))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_mul k a (pow2 64) b1 }
k.LE.mul (k.LE.mul (k.LE.mul (LE.pow k a b0) (LE.pow k a (b1 * pow2 64))) (LE.pow k a_pow2_128 b2)
)
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_add k a b0 (b1 * pow2 64) }
k.LE.mul (k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k (LE.pow k a (pow2 128)) b2))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_mul k a (pow2 128) b2 }
k.LE.mul (k.LE.mul (LE.pow k a (b0 + b1 * pow2 64)) (LE.pow k a (b2 * pow2 128)))
(LE.pow k a_pow2_192 b3);
( == ) { LE.lemma_pow_add k a (b0 + b1 * pow2 64) (b2 * pow2 128) }
k.LE.mul (LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128)) (LE.pow k (LE.pow k a (pow2 192)) b3);
( == ) { LE.lemma_pow_mul k a (pow2 192) b3 }
k.LE.mul (LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128)) (LE.pow k a (b3 * pow2 192));
( == ) { LE.lemma_pow_add k a (b0 + b1 * pow2 64 + b2 * pow2 128) (b3 * pow2 192) }
LE.pow k a (b0 + b1 * pow2 64 + b2 * pow2 128 + b3 * pow2 192);
( == ) { lemma_decompose_nat256_as_four_u64 b }
LE.pow k a b;
} | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Lib.Exponentiation.Definition.comm_monoid",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.pow2",
"Prims.int",
"FStar.Calc.calc_finish",
"Prims.eq2",
"Lib.Exponentiation.exp_four_fw",
"Lib.Exponentiation.Definition.pow",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Lib.Exponentiation.Definition.__proj__Mkcomm_monoid__item__mul",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Lib.Exponentiation.exp_four_fw_lemma",
"Prims.squash",
"Lib.Exponentiation.Definition.lemma_pow_mul",
"Lib.Exponentiation.Definition.lemma_pow_add",
"Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64",
"FStar.Pervasives.Native.tuple4",
"Hacl.Spec.PrecompBaseTable256.decompose_nat256_as_four_u64"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
}
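(* For intuition (illustrative note; the concrete numbers are not part of
the original file): [lemma_mod_pow2_sub] proves the bit-slice identity
x / pow2 a % pow2 b * pow2 a == x % pow2 (a + b) - x % pow2 a,
i.e. extracting the bits in positions [a, a+b) and shifting them back into
place is the same as keeping the low a+b bits and dropping the low a bits.
For example, with x = 53, a = 2, b = 2:
53 / 4 % 4 * 4 = 4 and 53 % 16 - 53 % 4 = 5 - 1 = 4. *)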
let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
} | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_point_mul_base_precomp4: #t:Type -> k:LE.comm_monoid t -> a:t -> b:nat{b < pow2 256} ->
Lemma (exp_as_exp_four_nat256_precomp k a b == LE.pow k a b) | [] | Hacl.Spec.PrecompBaseTable256.lemma_point_mul_base_precomp4 | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k: Lib.Exponentiation.Definition.comm_monoid t -> a: t -> b: Prims.nat{b < Prims.pow2 256}
-> FStar.Pervasives.Lemma
(ensures
Hacl.Spec.PrecompBaseTable256.exp_as_exp_four_nat256_precomp k a b ==
Lib.Exponentiation.Definition.pow k a b) | {
"end_col": 3,
"end_line": 91,
"start_col": 44,
"start_line": 48
} |
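Note on the record above: lemma_point_mul_base_precomp4 is the usual
correctness argument for four-way fixed-window exponentiation. Writing
b = b0 + b1 * 2^64 + b2 * 2^128 + b3 * 2^192, the pow-mul and pow-add laws
used in the calc give (a plain-notation sketch, writing * for k.mul, not
quoted from the source):
pow k a b == pow k a b0 * pow k (pow k a (2^64)) b1
           * pow k (pow k a (2^128)) b2 * pow k (pow k a (2^192)) b3
which is the quantity LE.exp_four_fw computes from the precomputed powers
a^(2^64), a^(2^128) and a^(2^192).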
FStar.Pervasives.Lemma | val lemma_decompose_nat256_as_four_u64: x:nat{x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 x in
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192 == x) | [
{
"abbrev": true,
"full_module": "Lib.LoopCombinators",
"short_module": "Loops"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_decompose_nat256_as_four_u64 x =
let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc (==) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
(==) { }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + x / pow2 192 * pow2 192;
(==) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + x / pow2 192 * pow2 192;
(==) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
(==) { lemma_mod_pow2_sub x 64 64 }
x;
} | val lemma_decompose_nat256_as_four_u64: x:nat{x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 x in
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192 == x)
let lemma_decompose_nat256_as_four_u64 x = | false | null | true | let x0 = x % pow2 64 in
let x1 = x / pow2 64 % pow2 64 in
let x2 = x / pow2 128 % pow2 64 in
let x3' = x / pow2 192 % pow2 64 in
Math.Lemmas.lemma_div_lt x 256 192;
Math.Lemmas.small_mod (x / pow2 192) (pow2 64);
let x3 = x / pow2 192 in
assert (x3 == x3');
calc ( == ) {
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192;
( == ) { () }
x0 + x1 * pow2 64 + (x / pow2 128 % pow2 64) * pow2 128 + (x / pow2 192) * pow2 192;
( == ) { lemma_mod_pow2_sub x 128 64 }
x0 + x1 * pow2 64 + x % pow2 192 - x % pow2 128 + (x / pow2 192) * pow2 192;
( == ) { Math.Lemmas.euclidean_division_definition x (pow2 192) }
x0 + x1 * pow2 64 - x % pow2 128 + x;
( == ) { lemma_mod_pow2_sub x 64 64 }
x;
} | {
"checked_file": "Hacl.Spec.PrecompBaseTable256.fst.checked",
"dependencies": [
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.LoopCombinators.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.PrecompBaseTable256.fst"
} | [
"lemma"
] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.pow2",
"FStar.Calc.calc_finish",
"Prims.int",
"Prims.eq2",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"Prims.op_Subtraction",
"Prims.op_Modulus",
"Prims.op_Division",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Hacl.Spec.PrecompBaseTable256.lemma_mod_pow2_sub",
"FStar.Math.Lemmas.euclidean_division_definition",
"Prims._assert",
"FStar.Math.Lemmas.small_mod",
"FStar.Math.Lemmas.lemma_div_lt"
] | [] | module Hacl.Spec.PrecompBaseTable256
open FStar.Mul
open Lib.IntTypes
module LSeq = Lib.Sequence
module Loops = Lib.LoopCombinators
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module BD = Hacl.Spec.Bignum.Definitions
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let lemma_mod_pow2_sub x a b =
calc (==) {
x / pow2 a % pow2 b * pow2 a;
(==) { Math.Lemmas.pow2_modulo_division_lemma_1 x a (a + b) }
x % pow2 (a + b) / pow2 a * pow2 a;
(==) { Math.Lemmas.euclidean_division_definition (x % pow2 (a + b)) (pow2 a) }
x % pow2 (a + b) - x % pow2 (a + b) % pow2 a;
(==) { Math.Lemmas.pow2_modulo_modulo_lemma_1 x a (a + b) }
x % pow2 (a + b) - x % pow2 a;
} | false | false | Hacl.Spec.PrecompBaseTable256.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_decompose_nat256_as_four_u64: x:nat{x < pow2 256} ->
Lemma (let (x0, x1, x2, x3) = decompose_nat256_as_four_u64 x in
x0 + x1 * pow2 64 + x2 * pow2 128 + x3 * pow2 192 == x) | [] | Hacl.Spec.PrecompBaseTable256.lemma_decompose_nat256_as_four_u64 | {
"file_name": "code/bignum/Hacl.Spec.PrecompBaseTable256.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.nat{x < Prims.pow2 256}
-> FStar.Pervasives.Lemma
(ensures
(let _ = Hacl.Spec.PrecompBaseTable256.decompose_nat256_as_four_u64 x in
(let FStar.Pervasives.Native.Mktuple4 #_ #_ #_ #_ x0 x1 x2 x3 = _ in
x0 + x1 * Prims.pow2 64 + x2 * Prims.pow2 128 + x3 * Prims.pow2 192 == x)
<:
Type0)) | {
"end_col": 3,
"end_line": 45,
"start_col": 42,
"start_line": 26
} |
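Note on the record above: the calc in lemma_decompose_nat256_as_four_u64
telescopes. Instantiating lemma_mod_pow2_sub at (128, 64) rewrites
(x / 2^128 % 2^64) * 2^128 to x % 2^192 - x % 2^128, instantiating it at
(64, 64) rewrites (x / 2^64 % 2^64) * 2^64 to x % 2^128 - x % 2^64, and
euclidean division gives x / 2^192 * 2^192 = x - x % 2^192, so the sum
collapses to x % 2^64 + (x % 2^128 - x % 2^64) + (x % 2^192 - x % 2^128)
+ (x - x % 2^192) = x (these equations restate the proof steps, for
readability only).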
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let safely_bounded (i:ins) =
Instr? i | let safely_bounded (i: ins) = | false | null | false | Instr? i | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"total"
] | [
"Vale.X64.Machine_Semantics_s.ins",
"Vale.X64.Bytes_Code_s.uu___is_Instr",
"Vale.X64.Machine_Semantics_s.instr_annotation",
"Prims.bool"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
[locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match
the values given to them in [locv]. *)
let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
)
(** [bounded_effects rw f] means that the execution of [f] is bounded
by the read-write set [rw]: whenever two states agree at the
locations in [rw.loc_reads], running [f] has the same effect on
both, and that effect is confined to the set [rw.loc_writes].
Additionally, execution always writes the values listed in
[rw.loc_constant_writes] into the resulting state. *)
let bounded_effects (rw:rw_set) (f:st unit) : GTot Type0 =
(only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (|l,v|) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (|l,v|) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(
forall s1 s2. {:pattern (run f s1); (run f s2)} (
(s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==> (
((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==>
unchanged_at rw.loc_writes (run f s1) (run f s2))
)
)
)
(** Safely bounded instructions are instructions for which we can
guarantee [bounded_effects] upon their execution. For the remaining
instructions, we currently do not have [bounded_effects] proofs. *)
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val safely_bounded : i: Vale.X64.Machine_Semantics_s.ins -> Prims.bool | [] | Vale.Transformers.BoundedInstructionEffects.safely_bounded | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | i: Vale.X64.Machine_Semantics_s.ins -> Prims.bool | {
"end_col": 10,
"end_line": 100,
"start_col": 2,
"start_line": 100
} |
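Note on the record above: [safely_bounded i] just tests whether [i] was
built with the [Instr] constructor, i.e. whether it is one of the
structured instructions; every other shape of [ins] is conservatively
reported as not safely bounded. A minimal use of the accompanying lemma
(hypothetical wrapper, written here for illustration only):
let safely_bounded_example (i:ins)
  : Lemma (requires safely_bounded i)
          (ensures bounded_effects (rw_set_of_ins i) (machine_eval_ins_st i))
  = lemma_machine_eval_ins_st_bounded_effects i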
|
Prims.GTot | val only_affects (locs: locations) (f: st unit) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
) | val only_affects (locs: locations) (f: st unit) : GTot Type0
let only_affects (locs: locations) (f: st unit) : GTot Type0 = | false | null | false | forall s. {:pattern unchanged_except locs s (run f s)}
((run f s).ms_ok ==> unchanged_except locs s (run f s)) | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"sometrivial"
] | [
"Vale.Transformers.Locations.locations",
"Vale.X64.Machine_Semantics_s.st",
"Prims.unit",
"Prims.l_Forall",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_imp",
"Prims.b2t",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.X64.Machine_Semantics_s.run",
"Vale.Transformers.BoundedInstructionEffects.unchanged_except"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *) | false | false | Vale.Transformers.BoundedInstructionEffects.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val only_affects (locs: locations) (f: st unit) : GTot Type0 | [] | Vale.Transformers.BoundedInstructionEffects.only_affects | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | locs: Vale.Transformers.Locations.locations -> f: Vale.X64.Machine_Semantics_s.st Prims.unit
-> Prims.GTot Type0 | {
"end_col": 3,
"end_line": 47,
"start_col": 2,
"start_line": 45
} |
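Note on the record above: [only_affects locs f] is a frame property
phrased through [unchanged_except]: for every state [s] on which [f]
runs successfully, the output state [run f s] may differ from [s] only
at locations in [locs]. A direct consequence, stated as a hypothetical
lemma for illustration only:
val only_affects_example (locs:locations) (f:st unit) (s:machine_state) (a:location)
  : Lemma (requires only_affects locs f /\ (run f s).ms_ok /\
                    !!(disjoint_location_from_locations a locs))
          (ensures eval_location a s == eval_location a (run f s))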
Prims.Tot | val add_r_to_rw_set (r: locations) (rw: rw_set) : rw_set | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let add_r_to_rw_set (r:locations) (rw:rw_set) : rw_set =
{ rw with loc_reads = r `L.append` rw.loc_reads } | val add_r_to_rw_set (r: locations) (rw: rw_set) : rw_set
let add_r_to_rw_set (r: locations) (rw: rw_set) : rw_set = | false | null | false | { rw with loc_reads = r `L.append` rw.loc_reads } | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"total"
] | [
"Vale.Transformers.Locations.locations",
"Vale.Transformers.BoundedInstructionEffects.rw_set",
"Vale.Transformers.BoundedInstructionEffects.Mkrw_set",
"FStar.List.Tot.Base.append",
"Vale.Transformers.Locations.location",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_reads",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_writes",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_constant_writes"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
[locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match
the values given to them in [locv]. *)
let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
)
(** [bounded_effects rw f] means that the execution of [f] is bounded
by the read-write set [rw]: whenever two states agree at the
locations in [rw.loc_reads], running [f] has the same effect on
both, and that effect is confined to the set [rw.loc_writes].
Additionally, execution always writes the values listed in
[rw.loc_constant_writes] into the resulting state. *)
let bounded_effects (rw:rw_set) (f:st unit) : GTot Type0 =
(only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (|l,v|) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (|l,v|) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(
forall s1 s2. {:pattern (run f s1); (run f s2)} (
(s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==> (
((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==>
unchanged_at rw.loc_writes (run f s1) (run f s2))
)
)
)
(** Safely bounded instructions are instructions for which we can
guarantee [bounded_effects] upon their execution. For the remaining
instructions, we currently do not have [bounded_effects] proofs. *)
let safely_bounded (i:ins) =
Instr? i
(** The evaluation of an instruction [i] is bounded by the read/write
set given by [rw_set_of_ins i]. *)
val lemma_machine_eval_ins_st_bounded_effects :
(i:ins) ->
Lemma
(requires (safely_bounded i))
(ensures (
(bounded_effects (rw_set_of_ins i) (machine_eval_ins_st i))))
(** The evaluation of a [code] which is just an instruction [i] is
bounded by the read/write set given by [rw_set_of_ins i]. *)
val lemma_machine_eval_code_Ins_bounded_effects :
(i:ins) ->
(fuel:nat) ->
Lemma
(requires (safely_bounded i))
(ensures (
(bounded_effects (rw_set_of_ins i)
(fun s -> (), (Some?.v (machine_eval_code_ins_def i s))))))
(** The evaluation of a comparison [o] depends solely upon its
locations, given by [locations_of_ocmp o] *)
val lemma_locations_of_ocmp : o:ocmp -> s1:machine_state -> s2:machine_state ->
Lemma
(requires (unchanged_at (locations_of_ocmp o) s1 s2))
(ensures (eval_ocmp s1 o == eval_ocmp s2 o))
(** Add more values into the reads portion of an [rw_set] *) | false | true | Vale.Transformers.BoundedInstructionEffects.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val add_r_to_rw_set (r: locations) (rw: rw_set) : rw_set | [] | Vale.Transformers.BoundedInstructionEffects.add_r_to_rw_set | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.Transformers.Locations.locations -> rw: Vale.Transformers.BoundedInstructionEffects.rw_set
-> Vale.Transformers.BoundedInstructionEffects.rw_set | {
"end_col": 49,
"end_line": 131,
"start_col": 4,
"start_line": 131
} |
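Note on the record above: [add_r_to_rw_set r rw] only enlarges the read
set, leaving [loc_writes] and [loc_constant_writes] untouched. Since a
larger [loc_reads] only strengthens the hypothesis of the determinism
clause of [bounded_effects], a computation bounded by [rw] should,
intuitively, remain bounded by the enlarged record.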
Prims.GTot | val unchanged_except (exceptions: locations) (s1 s2: machine_state) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
)) | val unchanged_except (exceptions: locations) (s1 s2: machine_state) : GTot Type0
let unchanged_except (exceptions: locations) (s1 s2: machine_state) : GTot Type0 = | false | null | false | (forall (a: location). {:pattern (eval_location a s2)}
((!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2)))) | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"sometrivial"
] | [
"Vale.Transformers.Locations.locations",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_Forall",
"Vale.Transformers.Locations.location",
"Prims.l_imp",
"Prims.b2t",
"Vale.Def.PossiblyMonad.op_Bang_Bang",
"Vale.Transformers.Locations.disjoint_location_from_locations",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"Vale.Transformers.Locations.eval_location"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) : | false | false | Vale.Transformers.BoundedInstructionEffects.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unchanged_except (exceptions: locations) (s1 s2: machine_state) : GTot Type0 | [] | Vale.Transformers.BoundedInstructionEffects.unchanged_except | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
exceptions: Vale.Transformers.Locations.locations ->
s1: Vale.X64.Machine_Semantics_s.machine_state ->
s2: Vale.X64.Machine_Semantics_s.machine_state
-> Prims.GTot Type0 | {
"end_col": 6,
"end_line": 40,
"start_col": 2,
"start_line": 37
} |
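Note on the record above: [unchanged_except exceptions s1 s2] is the
frame condition of this development: the two states may differ only at
locations that overlap [exceptions]. In particular, assuming that
disjointness from an empty list holds trivially (as one would expect of
[disjoint_location_from_locations]), [unchanged_except [] s1 s2] forces
the two states to agree on every location.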
Prims.GTot | val bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bounded_effects (rw:rw_set) (f:st unit) : GTot Type0 =
(only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (|l,v|) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (|l,v|) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(
forall s1 s2. {:pattern (run f s1); (run f s2)} (
(s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==> (
((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==>
unchanged_at rw.loc_writes (run f s1) (run f s2))
)
)
) | val bounded_effects (rw: rw_set) (f: st unit) : GTot Type0
let bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 = | false | null | false | (only_affects rw.loc_writes f) /\
(forall s. {:pattern (constant_on_execution rw.loc_constant_writes f s)}
constant_on_execution rw.loc_constant_writes f s) /\
(forall l v. {:pattern (L.mem (| l, v |) rw.loc_constant_writes); (L.mem l rw.loc_writes)}
L.mem (| l, v |) rw.loc_constant_writes ==> L.mem l rw.loc_writes) /\
(forall s1 s2. {:pattern (run f s1); (run f s2)}
((s1.ms_ok = s2.ms_ok /\ unchanged_at rw.loc_reads s1 s2) ==>
(((run f s1).ms_ok = (run f s2).ms_ok) /\
((run f s1).ms_ok ==> unchanged_at rw.loc_writes (run f s1) (run f s2))))) | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"sometrivial"
] | [
"Vale.Transformers.BoundedInstructionEffects.rw_set",
"Vale.X64.Machine_Semantics_s.st",
"Prims.unit",
"Prims.l_and",
"Vale.Transformers.BoundedInstructionEffects.only_affects",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_writes",
"Prims.l_Forall",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.Transformers.BoundedInstructionEffects.constant_on_execution",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_constant_writes",
"Vale.Transformers.Locations.location",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"FStar.Universe.raise_t",
"Vale.Transformers.Locations.location_val_eqt",
"Prims.l_imp",
"Prims.b2t",
"FStar.List.Tot.Base.mem",
"Vale.Transformers.BoundedInstructionEffects.location_with_value",
"Prims.Mkdtuple2",
"Vale.Transformers.Locations.location_eq",
"Prims.op_Equality",
"Prims.bool",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.Transformers.BoundedInstructionEffects.unchanged_at",
"Vale.Transformers.BoundedInstructionEffects.__proj__Mkrw_set__item__loc_reads",
"Vale.X64.Machine_Semantics_s.run"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
[locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match
the values given to them in [locv]. *)
let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
)
(** [bounded_effects rw f] means that the execution of [f] is bounded
by the read-write set [rw]: whenever two states agree at the
locations in [rw.loc_reads], running [f] has the same effect on
both, and that effect is confined to the set [rw.loc_writes].
Additionally, execution always writes the values listed in
[rw.loc_constant_writes] into the resulting state. *)
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bounded_effects (rw: rw_set) (f: st unit) : GTot Type0 | [] | Vale.Transformers.BoundedInstructionEffects.bounded_effects | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
rw: Vale.Transformers.BoundedInstructionEffects.rw_set ->
f: Vale.X64.Machine_Semantics_s.st Prims.unit
-> Prims.GTot Type0 | {
"end_col": 3,
"end_line": 93,
"start_col": 2,
"start_line": 80
} |
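Note on the record above: [bounded_effects rw f] packages three
guarantees. First, a frame condition: [only_affects rw.loc_writes f].
Second, constant writes: every successful run makes each location in
[rw.loc_constant_writes] hold its listed value, and those locations are
also recorded in [rw.loc_writes]. Third, determinism relative to the
reads: two input states that agree on [rw.loc_reads] (and on [ms_ok])
either both fail or both succeed with outputs that agree on
[rw.loc_writes].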
Prims.GTot | val constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec constant_on_execution (locv:locations_with_values) (f:st unit) (s:machine_state) : GTot Type0 =
(run f s).ms_ok ==> (
match locv with
| [] -> True
| (|l, v|) :: xs -> (
(eval_location l (run f s) == raise_location_val_eqt v) /\
(constant_on_execution xs f s)
)
) | val constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state) : GTot Type0
let rec constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state)
: GTot Type0 = | false | null | false | (run f s).ms_ok ==>
(match locv with
| [] -> True
| (| l , v |) :: xs ->
((eval_location l (run f s) == raise_location_val_eqt v) /\ (constant_on_execution xs f s))) | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"sometrivial"
] | [
"Vale.Transformers.BoundedInstructionEffects.locations_with_values",
"Vale.X64.Machine_Semantics_s.st",
"Prims.unit",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_imp",
"Prims.b2t",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_ok",
"Vale.X64.Machine_Semantics_s.run",
"Prims.l_True",
"Vale.Transformers.Locations.location_eq",
"Vale.Transformers.Locations.location_val_eqt",
"Prims.list",
"Vale.Transformers.BoundedInstructionEffects.location_with_value",
"Prims.l_and",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"Vale.Transformers.Locations.eval_location",
"Vale.Transformers.Locations.raise_location_val_eqt",
"Vale.Transformers.BoundedInstructionEffects.constant_on_execution",
"Prims.logical"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
[locs] is the same in both [s1] and [s2]. *)
let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
)
(** [constant_on_execution locv f s] means that running [f] on [s]
ensures that the values of the locations in [locv] always match
the values given to them in [locv]. *) | false | false | Vale.Transformers.BoundedInstructionEffects.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val constant_on_execution (locv: locations_with_values) (f: st unit) (s: machine_state) : GTot Type0 | [
"recursion"
] | Vale.Transformers.BoundedInstructionEffects.constant_on_execution | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
locv: Vale.Transformers.BoundedInstructionEffects.locations_with_values ->
f: Vale.X64.Machine_Semantics_s.st Prims.unit ->
s: Vale.X64.Machine_Semantics_s.machine_state
-> Prims.GTot Type0 | {
"end_col": 3,
"end_line": 70,
"start_col": 2,
"start_line": 63
} |
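Note on the record above: [constant_on_execution locv f s] only
constrains executions that succeed ((run f s).ms_ok); failing runs
satisfy it vacuously. For a successful run and locv = [(|l1, v1|); (|l2, v2|)]
it amounts to
eval_location l1 (run f s) == raise_location_val_eqt v1 /\
eval_location l2 (run f s) == raise_location_val_eqt v2
(the two-element list is an illustration, not quoted from the source).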
Prims.GTot | val unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "L"
},
{
"abbrev": false,
"full_module": "Vale.Transformers.Locations",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.PossiblyMonad",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Transformers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec unchanged_at (locs:locations) (s1 s2:machine_state) : GTot Type0 =
match locs with
| [] -> True
| x :: xs -> (
(eval_location x s1 == eval_location x s2) /\
(unchanged_at xs s1 s2)
) | val unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0
let rec unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 = | false | null | false | match locs with
| [] -> True
| x :: xs -> ((eval_location x s1 == eval_location x s2) /\ (unchanged_at xs s1 s2)) | {
"checked_file": "Vale.Transformers.BoundedInstructionEffects.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"Vale.Transformers.Locations.fsti.checked",
"Vale.Def.PossiblyMonad.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Transformers.BoundedInstructionEffects.fsti"
} | [
"sometrivial"
] | [
"Vale.Transformers.Locations.locations",
"Vale.X64.Machine_Semantics_s.machine_state",
"Prims.l_True",
"Vale.Transformers.Locations.location",
"Prims.list",
"Prims.l_and",
"Prims.eq2",
"Vale.Transformers.Locations.location_val_t",
"Vale.Transformers.Locations.eval_location",
"Vale.Transformers.BoundedInstructionEffects.unchanged_at"
] | [] | module Vale.Transformers.BoundedInstructionEffects
open Vale.X64.Bytes_Code_s
open Vale.X64.Machine_s
open Vale.X64.Machine_Semantics_s
open Vale.Def.PossiblyMonad
open Vale.Transformers.Locations
module L = FStar.List.Tot
(** A [location_with_value] contains a location and the value it must hold *)
type location_with_value = l:location_eq & location_val_eqt l
(** A [locations_with_values] contains locations and values they must hold *)
type locations_with_values = list location_with_value
(** An [rw_set] contains information about what locations are read and
written by a stateful operation. *)
type rw_set = {
loc_reads: locations;
loc_writes: locations;
loc_constant_writes: locations_with_values;
}
(** [rw_set_of_ins i] returns the read/write sets for the execution of
an instruction. *)
val rw_set_of_ins : i:ins -> rw_set
(** [locations_of_ocmp o] returns the read set for a comparison operator. *)
val locations_of_ocmp : o:ocmp -> locations
(** [unchanged_except exc s1 s2] means all locations that are disjoint
from the exceptions [exc] have the same value in both [s1] and [s2]. *)
let unchanged_except (exceptions:locations) (s1 s2:machine_state) :
GTot Type0 =
(forall (a:location). {:pattern (eval_location a s2)} (
(!!(disjoint_location_from_locations a exceptions) ==>
(eval_location a s1 == eval_location a s2))
))
(** [only_affects locs f] means that running [f] leaves everything
except [locs] unchanged. *)
let only_affects (locs:locations) (f:st unit) : GTot Type0 =
forall s. {:pattern unchanged_except locs s (run f s)} (
(run f s).ms_ok ==> unchanged_except locs s (run f s)
)
(** [unchanged_at locs s1 s2] means that the value of any location in
    [locs] is the same in both [s1] and [s2]. *)
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unchanged_at (locs: locations) (s1 s2: machine_state) : GTot Type0 | [
"recursion"
] | Vale.Transformers.BoundedInstructionEffects.unchanged_at | {
"file_name": "vale/code/lib/transformers/Vale.Transformers.BoundedInstructionEffects.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
locs: Vale.Transformers.Locations.locations ->
s1: Vale.X64.Machine_Semantics_s.machine_state ->
s2: Vale.X64.Machine_Semantics_s.machine_state
-> Prims.GTot Type0 | {
"end_col": 5,
"end_line": 57,
"start_col": 2,
"start_line": 52
} |
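Editorial sketch (not part of the dataset record above): assuming the definitions from the file context are in scope, [unchanged_at] applied to a concrete two-element location list unfolds to a pointwise conjunction. The name unchanged_at_two_sketch is illustrative only.

let unchanged_at_two_sketch (l1 l2:location) (s1 s2:machine_state) : GTot Type0 =
  (* what [unchanged_at [l1; l2] s1 s2] unfolds to *)
  (eval_location l1 s1 == eval_location l1 s2) /\
  (eval_location l2 s1 == eval_location l2 s2)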
Prims.Tot | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "EverCrypt.TargetConfig",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let getter (flag: bool) = unit -> Stack bool
(requires (fun _ -> true))
(ensures (fun h0 b h1 ->
B.(modifies loc_none h0 h1) /\
(b ==> flag))) | let getter (flag: bool) = | false | null | false | unit
-> Stack bool
(requires (fun _ -> true))
(ensures (fun h0 b h1 -> B.(modifies loc_none h0 h1) /\ (b ==> flag))) | {
"checked_file": "EverCrypt.AutoConfig2.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.TargetConfig.fsti.checked"
],
"interface_file": false,
"source_file": "EverCrypt.AutoConfig2.fsti"
} | [
"total"
] | [
"Prims.bool",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.b2t",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.l_imp"
] | [] | (** This module, unlike the previous attempt at autoconfig, is entirely written
in Low* + Vale, and does not play dirty tricks with global variables. As such,
there is no C implementation for it, only an .fst file.
This module revolves around individual feature flags, which can be selectively
disabled. *)
module EverCrypt.AutoConfig2
open FStar.HyperStack.ST
open EverCrypt.TargetConfig
module B = LowStar.Buffer
(** Each flag can be queried; we cache the results in mutable global variables,
hidden behind an abstract footprint. Calling a getter requires no reasoning
about the abstract footprint from the client. *)
unfold | false | true | EverCrypt.AutoConfig2.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val getter : flag: Prims.bool -> Type0 | [] | EverCrypt.AutoConfig2.getter | {
"file_name": "providers/evercrypt/EverCrypt.AutoConfig2.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | flag: Prims.bool -> Type0 | {
"end_col": 18,
"end_line": 24,
"start_col": 26,
"start_line": 20
} |
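Editorial sketch (not part of the record above): a hypothetical client combining a static target flag with the runtime getters declared in this interface. The function name is illustrative; the explicit let-bindings just sequence the two stateful checks.

let can_use_vale_aesgcm_sketch () : Stack bool
  (requires (fun _ -> true))
  (ensures (fun h0 _ h1 -> B.(modifies loc_none h0 h1)))
= let aesni = has_aesni () in
  let pclmul = has_pclmulqdq () in
  EverCrypt.TargetConfig.hacl_can_compile_vale && aesni && pclmul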
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "EverCrypt.TargetConfig",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let disabler = unit -> Stack unit
(requires (fun _ -> true))
(ensures (fun h0 _ h1 -> B.(modifies (fp ()) h0 h1))) | let disabler = | false | null | false | unit
-> Stack unit
(requires (fun _ -> true))
(ensures (fun h0 _ h1 -> let open B in modifies (fp ()) h0 h1)) | {
"checked_file": "EverCrypt.AutoConfig2.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.TargetConfig.fsti.checked"
],
"interface_file": false,
"source_file": "EverCrypt.AutoConfig2.fsti"
} | [
"total"
] | [
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"Prims.b2t",
"LowStar.Monotonic.Buffer.modifies",
"EverCrypt.AutoConfig2.fp"
] | [] | (** This module, unlike the previous attempt at autoconfig, is entirely written
in Low* + Vale, and does not play dirty tricks with global variables. As such,
there is no C implementation for it, only an .fst file.
This module revolves around individual feature flags, which can be selectively
disabled. *)
module EverCrypt.AutoConfig2
open FStar.HyperStack.ST
open EverCrypt.TargetConfig
module B = LowStar.Buffer
(** Each flag can be queried; we cache the results in mutable global variables,
hidden behind an abstract footprint. Calling a getter requires no reasoning
about the abstract footprint from the client. *)
unfold
inline_for_extraction noextract
let getter (flag: bool) = unit -> Stack bool
(requires (fun _ -> true))
(ensures (fun h0 b h1 ->
B.(modifies loc_none h0 h1) /\
(b ==> flag)))
val has_shaext: getter Vale.X64.CPU_Features_s.sha_enabled
val has_aesni: getter Vale.X64.CPU_Features_s.aesni_enabled
val has_pclmulqdq: getter Vale.X64.CPU_Features_s.pclmulqdq_enabled
val has_avx2: getter Vale.X64.CPU_Features_s.avx2_enabled
val has_avx: getter Vale.X64.CPU_Features_s.avx_enabled
val has_bmi2: getter Vale.X64.CPU_Features_s.bmi2_enabled
val has_adx: getter Vale.X64.CPU_Features_s.adx_enabled
val has_sse: getter Vale.X64.CPU_Features_s.sse_enabled
val has_movbe: getter Vale.X64.CPU_Features_s.movbe_enabled
val has_rdrand: getter Vale.X64.CPU_Features_s.rdrand_enabled
(** At the moment, has_avx512 contains the AVX512_F, AVX512_DQ, AVX512_BW and AVX512_VL flags
See Vale.X64.CPU_Features_s for more details. **)
val has_avx512: getter Vale.X64.CPU_Features_s.avx512_enabled
(** A set of functions that modify the global cached results. For this, the
client needs to reason about the abstract footprint. *)
val fp: unit -> GTot B.loc
(* A client that needs to allocate first then call init should use recall before
anything else; this way, the client will be able to derive disjointness of their
allocations and of fp. *)
val recall: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.(loc_not_unused_in h1 `loc_includes` (fp ())) /\ h0 == h1))
(* By default, all feature flags are disabled. A client must call init to get
meaningful results from the various has_* functions. *)
val init: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.modifies (fp ()) h0 h1)) | false | true | EverCrypt.AutoConfig2.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val disabler : Type0 | [] | EverCrypt.AutoConfig2.disabler | {
"file_name": "providers/evercrypt/EverCrypt.AutoConfig2.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 55,
"end_line": 63,
"start_col": 15,
"start_line": 61
} |
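Editorial sketch (not part of the record above): any disabler composes with init, since both only modify the abstract footprint fp (); the name init_then_disable_sketch is illustrative.

let init_then_disable_sketch (d:disabler) : Stack unit
  (requires (fun _ -> true))
  (ensures (fun h0 _ h1 -> B.(modifies (fp ()) h0 h1)))
= init ();
  d ()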
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "EverCrypt.TargetConfig",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vec256_enabled = Vale.X64.CPU_Features_s.avx2_enabled || vec256_not_avx2_enabled | let vec256_enabled = | false | null | false | Vale.X64.CPU_Features_s.avx2_enabled || vec256_not_avx2_enabled | {
"checked_file": "EverCrypt.AutoConfig2.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.TargetConfig.fsti.checked"
],
"interface_file": false,
"source_file": "EverCrypt.AutoConfig2.fsti"
} | [
"total"
] | [
"Prims.op_BarBar",
"Vale.X64.CPU_Features_s.avx2_enabled",
"EverCrypt.TargetConfig.vec256_not_avx2_enabled"
] | [] | (** This module, unlike the previous attempt at autoconfig, is entirely written
in Low* + Vale, and does not play dirty tricks with global variables. As such,
there is no C implementation for it, only an .fst file.
This module revolves around individual feature flags, which can be selectively
disabled. *)
module EverCrypt.AutoConfig2
open FStar.HyperStack.ST
open EverCrypt.TargetConfig
module B = LowStar.Buffer
(** Each flag can be queried; we cache the results in mutable global variables,
hidden behind an abstract footprint. Calling a getter requires no reasoning
about the abstract footprint from the client. *)
unfold
inline_for_extraction noextract
let getter (flag: bool) = unit -> Stack bool
(requires (fun _ -> true))
(ensures (fun h0 b h1 ->
B.(modifies loc_none h0 h1) /\
(b ==> flag)))
val has_shaext: getter Vale.X64.CPU_Features_s.sha_enabled
val has_aesni: getter Vale.X64.CPU_Features_s.aesni_enabled
val has_pclmulqdq: getter Vale.X64.CPU_Features_s.pclmulqdq_enabled
val has_avx2: getter Vale.X64.CPU_Features_s.avx2_enabled
val has_avx: getter Vale.X64.CPU_Features_s.avx_enabled
val has_bmi2: getter Vale.X64.CPU_Features_s.bmi2_enabled
val has_adx: getter Vale.X64.CPU_Features_s.adx_enabled
val has_sse: getter Vale.X64.CPU_Features_s.sse_enabled
val has_movbe: getter Vale.X64.CPU_Features_s.movbe_enabled
val has_rdrand: getter Vale.X64.CPU_Features_s.rdrand_enabled
(** At the moment, has_avx512 contains the AVX512_F, AVX512_DQ, AVX512_BW and AVX512_VL flags
See Vale.X64.CPU_Features_s for more details. **)
val has_avx512: getter Vale.X64.CPU_Features_s.avx512_enabled
(** A set of functions that modify the global cached results. For this, the
client needs to reason about the abstract footprint. *)
val fp: unit -> GTot B.loc
(* A client that needs to allocate first then call init should use recall before
anything else; this way, the client will be able to derive disjointness of their
allocations and of fp. *)
val recall: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.(loc_not_unused_in h1 `loc_includes` (fp ())) /\ h0 == h1))
(* By default, all feature flags are disabled. A client must call init to get
meaningful results from the various has_* functions. *)
val init: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.modifies (fp ()) h0 h1))
inline_for_extraction
let disabler = unit -> Stack unit
(requires (fun _ -> true))
(ensures (fun h0 _ h1 -> B.(modifies (fp ()) h0 h1)))
(* In order to selectively take codepaths, a client might disable individual feature
flags to, say, pick one Vale implementation over another. Alternatively, if the
codepath taken does not depend on a particular feature flag (e.g. OpenSSL vs.
BCrypt) the client can disable a provider entirely. *)
val disable_avx2: disabler
val disable_avx: disabler
val disable_bmi2: disabler
val disable_adx: disabler
val disable_shaext: disabler
val disable_aesni: disabler
val disable_pclmulqdq: disabler
val disable_sse: disabler
val disable_movbe: disabler
val disable_rdrand: disabler
val disable_avx512: disabler
(** Some predicates to dynamically guard the vectorized code *)
(* Note that those predicates don't check [EverCrypt.TargetConfig.hacl_can_compile_vec128],
* [EverCrypt.TargetConfig.hacl_can_compile_vale], etc.
* The reason is that the above booleans are static preconditions, checked at
 * compilation time. The F* code must thus be guarded in the following way (note that
* the order of the arguments is important for syntactic reasons):
* [> if EverCrypt.TargetConfig.hacl_can_compile_vec128 && has_vec128 ... then
* Leading to the following C code:
* [> #if defined(COMPILE_128)
* [> if has_vec128 ... { ... }
* [> #endif
* Note that if one forgets to guard the code with flags like
* [EverCrypt.TargetConfig.hacl_can_compile_vec128], the code will not compile on platforms
* not satisfying the requirements.
*)
noextract
let vec128_enabled = Vale.X64.CPU_Features_s.avx_enabled || vec128_not_avx_enabled | false | true | EverCrypt.AutoConfig2.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vec256_enabled : Prims.bool | [] | EverCrypt.AutoConfig2.vec256_enabled | {
"file_name": "providers/evercrypt/EverCrypt.AutoConfig2.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Prims.bool | {
"end_col": 84,
"end_line": 100,
"start_col": 21,
"start_line": 100
} |
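Editorial sketch (not part of the record above): because avx2_enabled is one of the disjuncts of vec256_enabled, a positive has_avx2 () result is enough to establish the vec256_enabled predicate in the postcondition; the function name is illustrative.

let has_avx2_implies_vec256_sketch () : Stack bool
  (requires (fun _ -> true))
  (ensures (fun h0 b h1 -> B.(modifies loc_none h0 h1) /\ (b ==> vec256_enabled)))
= has_avx2 ()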
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "EverCrypt.TargetConfig",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vec128_enabled = Vale.X64.CPU_Features_s.avx_enabled || vec128_not_avx_enabled | let vec128_enabled = | false | null | false | Vale.X64.CPU_Features_s.avx_enabled || vec128_not_avx_enabled | {
"checked_file": "EverCrypt.AutoConfig2.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.TargetConfig.fsti.checked"
],
"interface_file": false,
"source_file": "EverCrypt.AutoConfig2.fsti"
} | [
"total"
] | [
"Prims.op_BarBar",
"Vale.X64.CPU_Features_s.avx_enabled",
"EverCrypt.TargetConfig.vec128_not_avx_enabled"
] | [] | (** This module, unlike the previous attempt at autoconfig, is entirely written
in Low* + Vale, and does not play dirty tricks with global variables. As such,
there is no C implementation for it, only an .fst file.
This module revolves around individual feature flags, which can be selectively
disabled. *)
module EverCrypt.AutoConfig2
open FStar.HyperStack.ST
open EverCrypt.TargetConfig
module B = LowStar.Buffer
(** Each flag can be queried; we cache the results in mutable global variables,
hidden behind an abstract footprint. Calling a getter requires no reasoning
about the abstract footprint from the client. *)
unfold
inline_for_extraction noextract
let getter (flag: bool) = unit -> Stack bool
(requires (fun _ -> true))
(ensures (fun h0 b h1 ->
B.(modifies loc_none h0 h1) /\
(b ==> flag)))
val has_shaext: getter Vale.X64.CPU_Features_s.sha_enabled
val has_aesni: getter Vale.X64.CPU_Features_s.aesni_enabled
val has_pclmulqdq: getter Vale.X64.CPU_Features_s.pclmulqdq_enabled
val has_avx2: getter Vale.X64.CPU_Features_s.avx2_enabled
val has_avx: getter Vale.X64.CPU_Features_s.avx_enabled
val has_bmi2: getter Vale.X64.CPU_Features_s.bmi2_enabled
val has_adx: getter Vale.X64.CPU_Features_s.adx_enabled
val has_sse: getter Vale.X64.CPU_Features_s.sse_enabled
val has_movbe: getter Vale.X64.CPU_Features_s.movbe_enabled
val has_rdrand: getter Vale.X64.CPU_Features_s.rdrand_enabled
(** At the moment, has_avx512 contains the AVX512_F, AVX512_DQ, AVX512_BW and AVX512_VL flags
See Vale.X64.CPU_Features_s for more details. **)
val has_avx512: getter Vale.X64.CPU_Features_s.avx512_enabled
(** A set of functions that modify the global cached results. For this, the
client needs to reason about the abstract footprint. *)
val fp: unit -> GTot B.loc
(* A client that needs to allocate first then call init should use recall before
anything else; this way, the client will be able to derive disjointness of their
allocations and of fp. *)
val recall: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.(loc_not_unused_in h1 `loc_includes` (fp ())) /\ h0 == h1))
(* By default, all feature flags are disabled. A client must call init to get
meaningful results from the various has_* functions. *)
val init: unit -> Stack unit
(requires (fun _ -> True))
(ensures (fun h0 _ h1 ->
B.modifies (fp ()) h0 h1))
inline_for_extraction
let disabler = unit -> Stack unit
(requires (fun _ -> true))
(ensures (fun h0 _ h1 -> B.(modifies (fp ()) h0 h1)))
(* In order to selectively take codepaths, a client might disable individual feature
flags to, say, pick one Vale implementation over another. Alternatively, if the
codepath taken does not depend on a particular feature flag (e.g. OpenSSL vs.
BCrypt) the client can disable a provider entirely. *)
val disable_avx2: disabler
val disable_avx: disabler
val disable_bmi2: disabler
val disable_adx: disabler
val disable_shaext: disabler
val disable_aesni: disabler
val disable_pclmulqdq: disabler
val disable_sse: disabler
val disable_movbe: disabler
val disable_rdrand: disabler
val disable_avx512: disabler
(** Some predicates to dynamically guard the vectorized code *)
(* Note that those predicates don't check [EverCrypt.TargetConfig.hacl_can_compile_vec128],
* [EverCrypt.TargetConfig.hacl_can_compile_vale], etc.
* The reason is that the above booleans are static preconditions, checked at
 * compilation time. The F* code must thus be guarded in the following way (note that
* the order of the arguments is important for syntactic reasons):
* [> if EverCrypt.TargetConfig.hacl_can_compile_vec128 && has_vec128 ... then
* Leading to the following C code:
* [> #if defined(COMPILE_128)
* [> if has_vec128 ... { ... }
* [> #endif
* Note that if one forgets to guard the code with flags like
* [EverCrypt.TargetConfig.hacl_can_compile_vec128], the code will not compile on platforms
* not satisfying the requirements.
*) | false | true | EverCrypt.AutoConfig2.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vec128_enabled : Prims.bool | [] | EverCrypt.AutoConfig2.vec128_enabled | {
"file_name": "providers/evercrypt/EverCrypt.AutoConfig2.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Prims.bool | {
"end_col": 82,
"end_line": 98,
"start_col": 21,
"start_line": 98
} |
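Editorial sketch (not part of the record above) of the guard pattern described in the file-context comments, written with has_avx since has_vec128 is not declared in the excerpt; the branch bodies are placeholders.

let guarded_dispatch_sketch () : Stack unit
  (requires (fun _ -> true))
  (ensures (fun h0 _ h1 -> B.(modifies loc_none h0 h1)))
= if EverCrypt.TargetConfig.hacl_can_compile_vec128 && has_avx () then
    () (* the vectorized code path would go here *)
  else
    () (* the portable fallback would go here *)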
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let srel (a:Type0) = Preorder.preorder (Seq.seq a) | let srel (a: Type0) = | false | null | false | Preorder.preorder (Seq.seq a) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Preorder.preorder",
"FStar.Seq.Base.seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*) | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val srel : a: Type0 -> Type | [] | LowStar.Monotonic.Buffer.srel | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type0 -> Type | {
"end_col": 57,
"end_line": 31,
"start_col": 28,
"start_line": 31
} |
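Editorial sketch (not part of the record above): two concrete inhabitants of srel a. The first relates any two sequences (arbitrary updates allowed), the second only relates equal sequences (contents frozen); reflexivity and transitivity are discharged by the SMT solver.

let trivial_rel (a:Type0) : srel a = fun _ _ -> True
let frozen_rel (a:Type0) : srel a = fun s1 s2 -> s1 == s2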
|
FStar.Pervasives.Lemma | val loc_includes_trans_backwards (s1 s2 s3: loc)
: Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3 | val loc_includes_trans_backwards (s1 s2 s3: loc)
: Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
let loc_includes_trans_backwards (s1 s2 s3: loc)
: Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)] = | false | null | true | loc_includes_trans s1 s2 s3 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_includes_trans",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
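(* Editorial sketch, not part of the original interface: with the unit and
   idempotence lemmas above (and their SMT patterns) in scope, nested unions
   collapse automatically. *)
let loc_union_collapse_sketch (s: loc)
  : Lemma (loc_union loc_none (loc_union s s) == s)
= ()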
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_trans_backwards (s1 s2 s3: loc)
: Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)] | [] | LowStar.Monotonic.Buffer.loc_includes_trans_backwards | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
s3: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 s2 /\ LowStar.Monotonic.Buffer.loc_includes s2 s3)
(ensures LowStar.Monotonic.Buffer.loc_includes s1 s3)
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes s1 s3);
SMTPat (LowStar.Monotonic.Buffer.loc_includes s2 s3)
] | {
"end_col": 29,
"end_line": 622,
"start_col": 2,
"start_line": 622
} |
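Editorial sketch (not part of the record above): longer inclusion chains can be discharged by calling loc_includes_trans explicitly, without relying on the SMT patterns; the lemma name is illustrative.

let loc_includes_chain_sketch (s1 s2 s3 s4: loc)
  : Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3 /\ loc_includes s3 s4))
          (ensures (loc_includes s1 s4))
= loc_includes_trans s1 s2 s3;
  loc_includes_trans s1 s3 s4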
FStar.Pervasives.Lemma | val loc_includes_union_l' (s1 s2 s: loc)
: Lemma (requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s | val loc_includes_union_l' (s1 s2 s: loc)
: Lemma (requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
let loc_includes_union_l' (s1 s2 s: loc)
: Lemma (requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)] = | false | null | true | loc_includes_union_l s1 s2 s | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
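(* Usage sketch (hypothetical lemma, not part of this interface): combining
   live_null and len_null, the null buffer of any type is live in every
   memory and has length zero. *)
let null_is_live_and_empty (a:Type0) (rrel rel:srel a) (h:HS.mem)
  :Lemma (live h (mnull #a #rrel #rel) /\ length (mnull #a #rrel #rel) == 0)
  = live_null a rrel rel h;
    len_null a rrel rel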
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
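(* Illustration (hypothetical lemma, not part of this interface): since `get`
   is a plain `let`, it unfolds to sequence indexing, so the two spellings are
   interchangeable in specifications. *)
let get_is_index (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat{i < length p})
  :Lemma (get h p i == Seq.index (as_seq h p) i)
  = ()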
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
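(* Sketch (hypothetical helper, not part of this interface): the slice that
   covers the whole buffer has exactly the buffer's contents, by
   gsub_zero_length and congruence. *)
let as_seq_full_gsub (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
  :Lemma (as_seq h (mgsub rel b 0ul (len b)) == as_seq h b)
  = gsub_zero_length b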
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
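(* Illustration (hypothetical helper, not part of this interface): packaging
   the two lemmas above, loc_none is a two-sided identity for loc_union, as
   expected of the monoid structure described here. *)
let loc_union_loc_none_identity (s: loc)
  : Lemma (loc_union loc_none s == s /\ loc_union s loc_none == s)
  = loc_union_loc_none_l s;
    loc_union_loc_none_r s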
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
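(* Sketch (hypothetical helper, not part of this interface): reflexivity and
   transitivity make loc_includes a preorder, so longer inclusion chains
   collapse by repeated use of loc_includes_trans. *)
let loc_includes_chain (s1 s2 s3 s4: loc)
  : Lemma (requires (loc_includes s1 s2 /\ loc_includes s2 s3 /\ loc_includes s3 s4))
          (ensures (loc_includes s1 s4))
  = loc_includes_trans s1 s2 s3;
    loc_includes_trans s1 s3 s4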
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_union_l' (s1 s2 s: loc)
: Lemma (requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)] | [] | LowStar.Monotonic.Buffer.loc_includes_union_l' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
s: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 s \/ LowStar.Monotonic.Buffer.loc_includes s2 s)
(ensures LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2) s)
[SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2) s)] | {
"end_col": 32,
"end_line": 643,
"start_col": 4,
"start_line": 643
} |
Prims.Tot | val buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|) | val buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t
let buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t = | false | null | false | (| a, rrel, rel, b |) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Pervasives.Mkdtuple4",
"LowStar.Monotonic.Buffer.buf_t"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
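(* Sketch (hypothetical helper, not part of this interface): a composite
   footprint built with loc_union includes each of its operands, so either
   buffer can be framed out of a modifies clause stated on the union. *)
let loc_union_includes_each_buffer (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b1) /\
           loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
  = loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b2)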
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
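(* Illustrative sketch, not part of the original interface: `loc_disjoint_three_way`
   is a hypothetical client-side lemma showing how `loc_disjoint_union_r` is chained
   to discharge disjointness against a nested union. *)
let loc_disjoint_three_way (l l1 l2 l3: loc)
  : Lemma (requires (loc_disjoint l l1 /\ loc_disjoint l l2 /\ loc_disjoint l l3))
          (ensures (loc_disjoint l (loc_union l1 (loc_union l2 l3))))
  = loc_disjoint_union_r l l2 l3;                 // l is disjoint from loc_union l2 l3
    loc_disjoint_union_r l l1 (loc_union l2 l3)   // hence from the whole union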
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
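(* Illustrative sketch, not part of the original interface: `disjoint_halves_example`
   is a hypothetical lemma carving two non-overlapping sub-buffers of length `i` out of
   the same buffer; their locations are disjoint by `loc_disjoint_gsub_buffer`. *)
let disjoint_halves_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t{U32.v i + U32.v i <= length b})
  : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i)) (loc_buffer (mgsub rel b i i)))
  = loc_disjoint_gsub_buffer b 0ul i rel i i rel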
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buf (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : buf_t | [] | LowStar.Monotonic.Buffer.buf | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> LowStar.Monotonic.Buffer.buf_t | {
"end_col": 91,
"end_line": 991,
"start_col": 72,
"start_line": 991
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel | let buf_t = | false | null | false | a: Type0 & rrel: srel a & rel: srel a & mbuffer a rrel rel | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Pervasives.dtuple4",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
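(* Illustrative sketch, not part of the original interface: `get_is_index` is a
   hypothetical lemma making the definitional reading of `get` explicit. *)
let get_is_index (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (p:mbuffer a rrel rel) (i:nat{i < length p})
  : Lemma (get h p i == Seq.index (as_seq h p) i)
  = ()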
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
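(* Illustrative sketch, not part of the original interface: `loc_union_none_example`
   is a hypothetical lemma exercising the unit law inside a larger union. *)
let loc_union_none_example (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
  = loc_union_loc_none_r l1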
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
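(* Illustrative sketch, not part of the original interface: `loc_includes_union_parts`
   is a hypothetical lemma showing that a union includes each of its components,
   derived from reflexivity and `loc_includes_union_l`. *)
let loc_includes_union_parts (l1 l2: loc)
  : Lemma (loc_includes (loc_union l1 l2) l1 /\ loc_includes (loc_union l1 l2) l2)
  = loc_includes_refl l1;
    loc_includes_refl l2;
    loc_includes_union_l l1 l2 l1;
    loc_includes_union_l l1 l2 l2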
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
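(* Illustrative sketch, not part of the original interface: `loc_includes_prefix_example`
   is a hypothetical lemma showing that the location of a length-`n` prefix is included
   in the location of the whole buffer. *)
let loc_includes_prefix_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (n:U32.t{U32.v n <= length b})
  : Lemma (loc_includes (loc_buffer b) (loc_buffer (mgsub rel b 0ul n)))
  = loc_includes_gsub_buffer_r' b 0ul n rel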
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
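(* Illustrative sketch, not part of the original interface: `buffer_in_own_region_example`
   is a hypothetical lemma restating the pattern above through the `loc_region_only`
   shorthand. *)
let buffer_in_own_region_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_region_only true (frameOf b)) (loc_buffer b))
  = loc_includes_region_buffer' b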
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
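(* Illustrative sketch, not part of the original interface: `disjoint_buffers_disjoint_subs`
   is a hypothetical lemma propagating disjointness of two buffers to arbitrary
   sub-buffers, via `loc_includes_gsub_buffer_r'` and `loc_disjoint_includes`. *)
let disjoint_buffers_disjoint_subs (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (i1 len1 i2 len2:U32.t)
  : Lemma (requires (loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     U32.v i1 + U32.v len1 <= length b1 /\
                     U32.v i2 + U32.v len2 <= length b2))
          (ensures (loc_disjoint (loc_buffer (mgsub rel b1 i1 len1))
                                 (loc_buffer (mgsub rel b2 i2 len2))))
  = loc_includes_gsub_buffer_r' b1 i1 len1 rel;
    loc_includes_gsub_buffer_r' b2 i2 len2 rel;
    loc_disjoint_includes (loc_buffer b1) (loc_buffer b2)
                          (loc_buffer (mgsub rel b1 i1 len1))
                          (loc_buffer (mgsub rel b2 i2 len2))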
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buf_t : Type | [] | LowStar.Monotonic.Buffer.buf_t | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Type | {
"end_col": 67,
"end_line": 987,
"start_col": 12,
"start_line": 987
} |
|
Prims.Tot | val all_live (h: HS.mem) (l: list buf_t) : Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l | val all_live (h: HS.mem) (l: list buf_t) : Type0
let all_live (h: HS.mem) (l: list buf_t) : Type0 = | false | null | false | BigOps.big_and #buf_t (fun (| _ , _ , _ , b |) -> live h b) l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.list",
"LowStar.Monotonic.Buffer.buf_t",
"FStar.BigOps.big_and",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.live"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
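(* Illustrative sketch, not part of the original interface: `nonzero_length_not_null`
   is a hypothetical lemma recording that any buffer of strictly positive length is
   distinct from the null buffer. *)
let nonzero_length_not_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel{length b > 0})
  : Lemma (g_is_null b == false)
  = length_null_1 b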
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
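(*
 * An illustrative sketch, not part of the verified interface: the lemmas
 * above make `loc_union` a commutative, idempotent monoid with unit
 * `loc_none`, so footprints can be rearranged algebraically. For instance
 * (the name `loc_union_example` is hypothetical):
 *
 * let loc_union_example (l1 l2:loc)
 *   : Lemma (loc_union (loc_union loc_none l1) (loc_union l2 l1) == loc_union l1 l2)
 *   = loc_union_loc_none_l l1;
 *     loc_union_comm l2 l1;
 *     loc_union_assoc l1 l1 l2;
 *     loc_union_idem l1
 *)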
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
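(*
 * An illustrative sketch, not part of the verified interface:
 * `loc_all_regions_from` covers `r` together with every region that
 * extends it, so it includes `loc_region_only` for the same `r`. The
 * statement below is expected to follow from `loc_includes_region_region`
 * (stated further down); the name is hypothetical.
 *
 * let all_regions_from_includes_region_only (pl:bool) (r:HS.rid)
 *   : Lemma (loc_includes (loc_all_regions_from pl r) (loc_region_only pl r))
 *   = () // relies on r being a member of HS.mod_set (Set.singleton r)
 *)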
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
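(*
 * An illustrative sketch, not part of the verified interface: a caller
 * whose footprint `l` covers a whole buffer automatically covers any of
 * its sub-buffers, thanks to `loc_includes_gsub_buffer_r` and its SMT
 * pattern. The name below is hypothetical.
 *
 * let footprint_covers_sub (#a:Type0) (#rrel #rel:srel a)
 *   (l:loc) (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
 *   : Lemma (requires (U32.v i + U32.v len <= length b /\ loc_includes l (loc_buffer b)))
 *           (ensures  (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
 *   = ()
 *)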
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
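(*
 * An illustrative sketch, not part of the verified interface: splitting a
 * buffer into two non-overlapping halves yields disjoint footprints, the
 * key fact for updating one half while preserving the other. The name and
 * the reuse of `rel` for both halves are assumptions of this sketch.
 *
 * let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
 *   (b:mbuffer a rrel rel) (half:U32.t)
 *   : Lemma (requires (U32.v half + U32.v half <= length b))
 *           (ensures  (loc_disjoint (loc_buffer (mgsub rel b 0ul half))
 *                                   (loc_buffer (mgsub rel b half half))))
 *   = () // follows from loc_disjoint_gsub_buffer and its SMT pattern
 *)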
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
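(*
 * An illustrative sketch, not part of the verified interface: `buf` packs
 * the element type and both preorders into the dependent tuple `buf_t`,
 * so buffers of different types can live in a single list, e.g. to state
 * the liveness conjunction described next. The name `two_bufs` is
 * hypothetical.
 *
 * let two_bufs (#a #b:Type0) (#rrel1 #rel1:srel a) (#rrel2 #rel2:srel b)
 *   (b1:mbuffer a rrel1 rel1) (b2:mbuffer b rrel2 rel2)
 *   : list buf_t
 *   = [buf b1; buf b2]
 *)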
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val all_live (h: HS.mem) (l: list buf_t) : Type0 | [] | LowStar.Monotonic.Buffer.all_live | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> l: Prims.list LowStar.Monotonic.Buffer.buf_t -> Type0 | {
"end_col": 60,
"end_line": 998,
"start_col": 2,
"start_line": 998
} |
Prims.GTot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none) | let loc_union_l (l: list loc) = | false | null | false | BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.normal",
"FStar.List.Tot.Base.fold_right_gtot",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_none"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
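(*
 * An illustrative sketch, not part of this interface: two concrete
 * sequence preorders. The "trivial" preorder allows any evolution of the
 * contents (plain mutable buffers); the "immutable" one pins the contents
 * (cf. LowStar.ImmutableBuffer). The trivial preorder is compatible, in
 * the sense above, with itself at every slice, which is why sub-buffers
 * of plain buffers never need extra compatibility proofs.
 *
 * let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
 *
 * let immutable_preorder (a:Type0) : srel a = fun s1 s2 -> Seq.equal s1 s2
 *)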
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
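(*
 * An illustrative sketch, not part of this interface: `as_seq` and `get`
 * are how functional specifications talk about buffer contents. The
 * predicate below (a hypothetical name) states that a buffer of 32-bit
 * integers holds only zeros in a given memory.
 *
 * let all_zeros (#rrel #rel:srel U32.t) (h:HS.mem) (b:mbuffer U32.t rrel rel)
 *   : GTot Type0
 *   = forall (i:nat{i < length b}). get h b i == 0ul
 *)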
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a preorder different from that of their parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent's preorder (see ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
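(* Editor's note: a minimal, hypothetical usage sketch, not part of the original
   interface; the name `example_includes_union_l` is assumed. It shows that the
   union-on-the-left pattern lemmas above are each discharged by a single call
   to `loc_includes_union_l`. *)
let example_includes_union_l (s1 s2 s:loc)
  : Lemma (requires (loc_includes s2 s))
          (ensures (loc_includes (loc_union s1 s2) s))
  = loc_includes_union_l s1 s2 s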
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
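(* Editor's note: hypothetical illustration, not part of the original file; the
   name `example_disjoint_union` is assumed. It combines symmetry with union on
   the right, two of the lemmas declared just above. *)
let example_disjoint_union (s s1 s2:loc)
  : Lemma (requires (loc_disjoint s1 s /\ loc_disjoint s2 s))
          (ensures (loc_disjoint s (loc_union s1 s2)))
  = loc_disjoint_sym s1 s;
    loc_disjoint_sym s2 s;
    loc_disjoint_union_r s s1 s2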
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
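(* Editor's note: hypothetical illustration, not part of the original file; the
   name `example_all_disjoint` is assumed. Applied to a concrete list,
   `all_disjoint` normalizes at typechecking time to (roughly) the conjunction
   loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3. *)
let example_all_disjoint (l1 l2 l3:loc) : Type0 =
  all_disjoint [l1; l2; l3]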
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union_l : l: Prims.list LowStar.Monotonic.Buffer.loc -> Prims.GTot LowStar.Monotonic.Buffer.loc | [] | LowStar.Monotonic.Buffer.loc_union_l | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list LowStar.Monotonic.Buffer.loc -> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 63,
"end_line": 1012,
"start_col": 2,
"start_line": 1012
} |
|
Prims.Tot | val loc_pairwise_disjoint (l: list loc) : Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l | val loc_pairwise_disjoint (l: list loc) : Type0
let loc_pairwise_disjoint (l: list loc) : Type0 = | false | null | false | BigOps.pairwise_and loc_disjoint l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.pairwise_and",
"LowStar.Monotonic.Buffer.loc_disjoint"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
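(* Editor's note: hypothetical illustration, not part of the original file; the
   name `example_trivial_preorder` is assumed. The simplest possible `srel` is
   the trivial preorder that allows the contents to evolve arbitrarily;
   LowStar.Buffer provides an analogous definition for its plain buffers. *)
let example_trivial_preorder (a:Type0) : srel a = fun _ _ -> True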
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
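(* Editor's note: hypothetical usage sketch, not part of the original file; the
   name `example_live_not_unused` is assumed. `Classical.move_requires` turns
   the requires/False lemma above into the negation one usually wants. *)
let example_live_not_unused (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel)
  : Lemma (requires (live h b))
          (ensures (~ (b `unused_in` h)))
  = Classical.move_requires (live_not_unused_in h) b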
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
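(* Editor's note: hypothetical illustration, not part of the original file; the
   name `example_gsub_same_region_and_addr` is assumed. A sub-buffer lives in
   the same region and at the same address as its parent; the two lemmas above
   give both facts directly. *)
let example_gsub_same_region_and_addr (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
  : Lemma (requires (U32.v i + U32.v len <= length b))
          (ensures (frameOf (mgsub sub_rel b i len) == frameOf b /\
                    as_addr (mgsub sub_rel b i len) == as_addr b))
  = frameOf_gsub b i len sub_rel;
    as_addr_gsub b i len sub_rel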
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
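(* Editor's note: hypothetical illustration, not part of the original file; the
   name `example_union_absorb` is assumed. A small algebraic consequence of the
   commutativity and idempotence lemmas above. *)
let example_union_absorb (s1 s2:loc)
  : Lemma (loc_union s1 (loc_union s2 s1) == loc_union s1 s2)
  = loc_union_comm s2 s1;
    loc_union_idem_1 s1 s2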
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
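(* Editor's note: hypothetical usage sketch, not part of the original file; the
   name `example_includes_union_trans` is assumed. It chains reflexivity,
   union-on-the-left and transitivity of `loc_includes`. *)
let example_includes_union_trans (s1 s2 s3:loc)
  : Lemma (requires (loc_includes s2 s3))
          (ensures (loc_includes (loc_union s1 s2) s3))
  = loc_includes_refl s2;
    loc_includes_union_l s1 s2 s2;
    loc_includes_trans (loc_union s1 s2) s2 s3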
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the set corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the set corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
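(* Editor's note: hypothetical usage sketch, not part of the original file; the
   name `example_disjoint_with_gsub` is assumed. Disjointness from a buffer
   propagates to any of its sub-buffers by combining buffer inclusion with
   `loc_disjoint_includes_r` above. *)
let example_disjoint_with_gsub (#a:Type0) (#rrel #rel:srel a)
  (l:loc) (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
  : Lemma (requires (U32.v i + U32.v len <= length b /\ loc_disjoint l (loc_buffer b)))
          (ensures (loc_disjoint l (loc_buffer (mgsub sub_rel b i len))))
  = loc_includes_gsub_buffer_r' b i len sub_rel;
    loc_disjoint_includes_r l (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))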
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"] | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_pairwise_disjoint (l: list loc) : Type0 | [] | LowStar.Monotonic.Buffer.loc_pairwise_disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list LowStar.Monotonic.Buffer.loc -> Type0 | {
"end_col": 82,
"end_line": 1019,
"start_col": 48,
"start_line": 1019
} |
Prims.GTot | val fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l | val fresh_loc (l: loc) (h h': HS.mem) : GTot Type0
let fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 = | false | null | false | (loc_unused_in h) `loc_includes` l /\ (loc_not_unused_in h') `loc_includes` l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_unused_in",
"LowStar.Monotonic.Buffer.loc_not_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
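(* Editor's note: hypothetical usage sketch, not part of the original file; the
   name `example_nonzero_length_not_null` is assumed. Combining the two lemmas
   above: a buffer with non-zero length cannot be the null buffer. *)
let example_nonzero_length_not_null (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel)
  : Lemma (requires (length b > 0))
          (ensures (~ (b == mnull #a #rrel #rel)))
  = length_null_1 b;
    null_unique b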
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder ``sub_rel`` to the sub-buffer,
/// provided it is compatible with the preorder of the parent buffer
/// (see ``compatible_sub`` above).
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
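(* An illustrative sketch (the helper is ours, not part of the library): carving a
   buffer of length 8 into two ghost halves that keep the parent preorder. The
   length hypothesis discharges the precondition of ``mgsub``. *)
let gsub_split_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Ghost (mbuffer a rrel rel & mbuffer a rrel rel)
    (requires (length b == 8))
    (ensures (fun _ -> True))
  = (mgsub rel b 0ul 4ul, mgsub rel b 4ul 4ul)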
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// The modifies clause
/// ===================
///
/// The modifies clause for regions, references and buffers.
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
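(* A small sanity check (a sketch; the lemma name is ours): the unit law above,
   together with congruence, lets the SMT solver normalize simple union
   expressions, e.g. dropping a ``loc_none`` operand. *)
let loc_union_unit_example (l1 l2:loc)
  : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
  = loc_union_loc_none_r l1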
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
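(* An illustrative corollary (the name is ours): a location covering a whole
   buffer also covers any sub-buffer carved out of it, by the lemma above. *)
let loc_includes_sub_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b >= 4})
  : Lemma (loc_includes (loc_buffer b) (loc_buffer (mgsub rel b 0ul 4ul)))
  = loc_includes_gsub_buffer_r' b 0ul 4ul rel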
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in that region.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
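(* An illustrative corollary (the name is ours): two sub-buffers carved from
   non-overlapping ranges of the same buffer have disjoint locations, by
   ``loc_disjoint_gsub_buffer`` above. *)
let loc_disjoint_halves_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b >= 8})
  : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul)) (loc_buffer (mgsub rel b 4ul 4ul)))
  = loc_disjoint_gsub_buffer b 0ul 4ul rel 4ul 4ul rel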
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
 *)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
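(* An illustrative usage sketch (not checked here; the buffer names are ours): a
   stateful signature over several buffers typically states joint liveness and
   pairwise disjointness as
     requires (fun h -> all_live h [buf b1; buf b2; buf b3] /\
                     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3])
   both of which reduce to the expected conjunctions at typechecking time. *)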
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
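(* An illustrative corollary of the elimination lemma above (the helper name is
   ours): writing through ``b1`` cannot change a live buffer ``b2`` that is
   disjoint from it. *)
let framing_example (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b2 /\
                     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     modifies (loc_buffer b1) h h'))
          (ensures  (live h' b2 /\ as_seq h b2 == as_seq h' b2))
  = loc_disjoint_sym (loc_buffer b1) (loc_buffer b2);
    modifies_buffer_elim b2 (loc_buffer b1) h h'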
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory
/// locations is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
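(* An illustrative corollary (the name is ours): two successive modifications of
   the same footprint compose into a single modifies clause, by ``modifies_trans``
   and the idempotence of ``loc_union``. *)
let modifies_chain_example (l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
          (ensures  (modifies l h0 h2))
  = modifies_trans l h0 h1 l h2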
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is not
/// live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val fresh_loc (l: loc) (h h': HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.fresh_loc | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 39,
"end_line": 1676,
"start_col": 2,
"start_line": 1675
} |
Prims.GTot | val disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2) | val disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0
let disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 = | false | null | false | loc_disjoint (loc_buffer b1) (loc_buffer b2) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
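(* An illustrative instance (a sketch; plain mutable buffers in LowStar.Buffer use
   an equivalent definition): the trivial preorder relates any two sequences, i.e.
   it places no constraint on how the contents may evolve. *)
let example_trivial_preorder (a:Type0) : srel a = fun _ _ -> True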
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
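(* An illustrative corollary (the name is ours): ``null_unique`` converts the
   boolean nullity test into a propositional equality with ``mnull``. *)
let null_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel{g_is_null b})
  : Lemma (b == mnull #a #rrel #rel)
  = null_unique b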
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
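(* Illustrative only (not part of the original interface): since ``get`` is a
   shorthand for indexing ``as_seq``, a hypothetical lemma such as the
   following holds by definition:

   let get_is_index (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
     : Lemma (get h b i == Seq.index (as_seq h b) i)
     = ()
*)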
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility".
///
///
/// A sub-buffer can be taken with a different preorder than its parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
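(* A hypothetical ghost-level use of ``mgsub`` (names are illustrative only):
   taking a prefix of a buffer while keeping the same preorder:

   let ghost_prefix (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : Ghost (mbuffer a rrel rel) (requires True) (ensures fun _ -> True)
     = mgsub rel b 0ul i
*)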
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
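(* A small illustration (hypothetical client code): the monoid laws and their
   SMT patterns are expected to discharge such equalities automatically:

   let union_absorbs_none (l1 l2:loc)
     : Lemma (loc_union l1 (loc_union loc_none l2) == loc_union l1 l2)
     = ()
*)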
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
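(* Illustrative only: coarser and coarser over-approximations of the footprint
   of a single buffer ``b`` (hypothetical ghost helper):

   let footprints (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : GTot (list loc)
     = [ loc_buffer b;
         loc_addr_of_buffer b;
         loc_region_only false (frameOf b);
         loc_all_regions_from false (frameOf b) ]
*)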
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by
/// inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
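(* Hypothetical example: two non-overlapping sub-buffers of ``b`` are
   disjoint; this is expected to follow from ``loc_disjoint_gsub_buffer`` and
   its pattern:

   let split_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                           (loc_buffer (mgsub rel b i (U32.sub (len b) i))))
     = ()
*)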
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
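(* A typical (hypothetical) way these helpers appear in a specification:
   require that three buffers are live and pairwise disjoint:

   let three_bufs_pre (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel) : GTot Type0
     = all_live h [buf b1; buf b2; buf b3] /\
       all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*)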
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
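(* The elimination principle in action (hypothetical client proof): if only
   ``b1`` is modified and ``b2`` is disjoint from it, then ``b2`` keeps its
   liveness and contents; the SMT patterns above are expected to discharge
   this automatically:

   let frame_b2 (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = ()
*)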
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
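(* Hypothetical illustration: modifying only a buffer footprint (which is
   address-liveness-insensitive) preserves the liveness of any other live
   buffer, even without a disjointness hypothesis:

   let still_live (#a:Type0) (#rrel #rel:srel a)
     (b x:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 x /\ modifies (loc_buffer b) h0 h1))
             (ensures  (live h1 x))
     = ()
*)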
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
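(* Hypothetical chaining example: two successive writes to the same footprint
   collapse into a single modifies clause, via ``modifies_trans_linear``:

   let chain (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h0 h1 /\
                        modifies (loc_buffer b) h1 h2))
             (ensures  (modifies (loc_buffer b) h0 h2))
     = ()
*)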
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
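(* Hypothetical illustration: a location that is fresh between ``h0`` and
   ``h1`` is disjoint from anything already allocated in ``h0``; this is the
   usual consequence of ``unused_in_not_unused_in_disjoint_2`` (here invoked
   explicitly):

   let fresh_disjoint (l l':loc) (h0 h1:HS.mem)
     : Lemma (requires (fresh_loc l h0 h1 /\ l' `loc_in` h0))
             (ensures  (loc_disjoint l l'))
     = unused_in_not_unused_in_disjoint_2 l l' l l' h0
*)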
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val disjoint
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> Prims.GTot Type0 | {
"end_col": 46,
"end_line": 1727,
"start_col": 2,
"start_line": 1727
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l | let loc_in (l: loc) (h: HS.mem) = | false | null | false | (loc_not_unused_in h) `loc_includes` l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_not_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
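(* Illustrative sketch, not part of the original interface: the two names below
   are invented for illustration. A preorder that relates any two sequences
   (mirroring LowStar.Buffer.trivial_preorder) should be compatible with itself
   on any slice, which is what lets plain buffers take sub-buffers freely. *)
let example_trivial_rel (a:Type0) : srel a = fun _ _ -> True
let example_trivial_rel_compatible (a:Type0) (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  :Lemma (compatible_subseq_preorder len (example_trivial_rel a) i j (example_trivial_rel a))
  = ()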
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
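(* Illustrative sketch, not part of the original interface; the lemma name is
   invented. It shows the kind of inclusion reasoning the patterns above are
   meant to discharge automatically: inclusion of a union gives inclusion of
   each component, and inclusion survives widening of the left-hand side. *)
let example_loc_includes (s l1 l2:loc)
  :Lemma (requires (loc_includes s (loc_union l1 l2)))
         (ensures (loc_includes s l1 /\ loc_includes (loc_union s l2) l1))
  = ()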
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by inclusion as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
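(* Illustrative sketch, not part of the original interface; the lemma name is
   invented. Disjointness from a union yields disjointness from each component,
   in either orientation, thanks to the union and symmetry lemmas above. *)
let example_loc_disjoint (s l1 l2:loc)
  :Lemma (requires (loc_disjoint s (loc_union l1 l2)))
         (ensures (loc_disjoint s l1 /\ loc_disjoint l2 s))
  = ()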
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
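(* Illustrative sketch, not part of the original interface; the lemma name is
   invented. Because these predicates are unfold and built with BigOps, a
   pairwise-disjointness assumption over a literal list is expected to reduce at
   typechecking time to the individual loc_disjoint facts. *)
let example_all_disjoint (l1 l2 l3:loc)
  :Lemma (requires (all_disjoint [l1; l2; l3]))
         (ensures (loc_disjoint l1 l2 /\ loc_disjoint l1 l3 /\ loc_disjoint l2 l3))
  = ()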
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
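(* Illustrative sketch, not part of the original interface; names are invented.
   This is the framing principle in client form: a live buffer disjoint from the
   modified set keeps both its liveness and its contents, and the proof is
   discharged purely by the SMT patterns of modifies_buffer_elim. *)
let example_framing (#a1:Type0) (#rrel1 #rel1:srel a1) (#a2:Type0) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
  :Lemma (requires (live h b1 /\
                    loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                    modifies (loc_buffer b2) h h'))
         (ensures (live h' b1 /\ as_seq h b1 == as_seq h' b1))
  = ()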
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
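(* Illustrative sketch, not part of the original interface; the name is invented.
   Since buffer footprints are address-liveness-insensitive, modifying one buffer
   cannot un-allocate another, even without a disjointness hypothesis. *)
let example_liveness_preserved (#a1:Type0) (#rrel1 #rel1:srel a1) (#a2:Type0) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
  :Lemma (requires (live h b1 /\ modifies (loc_buffer b2) h h'))
         (ensures (live h' b1))
  = modifies_liveness_insensitive_buffer_weak (loc_buffer b2) h h' b1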
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
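(* Illustrative sketch, not part of the original interface; the name is invented.
   A typical way to chain two stateful steps: take the union of their footprints
   and then weaken it to a common goal location with the lemmas above. *)
let example_modifies_chain (l1 l2 l:loc) (h1 h2 h3:HS.mem)
  :Lemma (requires (modifies l1 h1 h2 /\ modifies l2 h2 h3 /\
                    loc_includes l l1 /\ loc_includes l l2))
         (ensures (modifies l h1 h3))
  = modifies_trans l1 h1 h2 l2 h3;
    loc_includes_union_r l l1 l2;
    modifies_loc_includes l h1 h3 (loc_union l1 l2)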
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *) | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_in : l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | [] | LowStar.Monotonic.Buffer.loc_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | {
"end_col": 38,
"end_line": 1563,
"start_col": 2,
"start_line": 1563
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel | let rrel_rel_always_compatible (#a: Type0) (rrel rel: srel a) = | false | null | false | forall (len: nat) (i: nat) (j: nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.l_Forall",
"Prims.nat",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.compatible_subseq_preorder",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
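(* Illustrative sketch, not part of the original interface: concrete buffer
   types are obtained by fixing both preorders. For instance, LowStar.Buffer
   defines ordinary mutable buffers roughly as

     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)

   so their contents may evolve arbitrarily, whereas an immutable-buffer
   library would instead pick a preorder that forbids changes after
   initialization. *)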
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
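(* Illustrative sketch, hypothetical names: given a buffer b of length at
   least 6, mgsub sub_rel b 2ul 4ul is the ghost sub-buffer covering indices
   [2, 6) of b. Its contents are Seq.slice (as_seq h b) 2 6 (see as_seq_gsub
   below), and it is live exactly when b is, provided
   compatible_sub b 2ul 4ul sub_rel holds. *)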
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
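(* Illustrative sketch, not part of the original interface: the lemmas above
   make (loc, loc_union, loc_none) a commutative and idempotent monoid, so an
   expression such as

     loc_union loc_none (loc_union l l)

   provably equals l, by loc_union_loc_none_l and loc_union_idem. *)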
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
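(* Illustrative sketch, not part of the original interface: for a region r,
   loc_region_only false r covers r itself but not its children, whereas
   loc_all_regions_from false r also covers every region transitively
   extending r, e.g. stack frames pushed on top of r. Locations built with
   preserve_liveness = true are liveness-insensitive (see
   address_liveness_insensitive_addresses and
   region_liveness_insensitive_regions further below). *)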
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
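(* Illustrative sketch, hypothetical: splitting a buffer b of length 8 into
   two halves, mgsub sub_rel b 0ul 4ul and mgsub sub_rel b 4ul 4ul, the lemma
   above yields loc_disjoint between their locations, since the ranges [0, 4)
   and [4, 8) do not overlap. *)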
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
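(* Illustrative sketch, hypothetical: for three buffers b1, b2, b3, a typical
   specification would require

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   and mention loc_union_l [loc_buffer b1; loc_buffer b2] in a modifies
   clause; these big-operator forms reduce at typechecking time to the
   underlying conjunctions and unions. *)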
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
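(* Illustrative sketch, not part of the original interface: the framing
   principle is that from

     modifies (loc_buffer b1) h0 h1 /\
     loc_disjoint (loc_buffer b1) (loc_buffer b2) /\ live h0 b2

   one can conclude live h1 b2 /\ as_seq h1 b2 == as_seq h0 b2; this is
   exactly what modifies_buffer_elim below provides. *)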
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
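(* Illustrative sketch, hypothetical: composing two steps

     modifies (loc_buffer b) h0 h1   and
     modifies (loc_union (loc_buffer b) l) h1 h2

   modifies_trans yields
   modifies (loc_union (loc_buffer b) (loc_union (loc_buffer b) l)) h0 h2,
   while modifies_trans_linear directly yields
   modifies (loc_union (loc_buffer b) l) h0 h2, since the goal location
   includes the first one. *)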
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
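(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): splitting a buffer at `mid` with `mgsub`, at the buffer's own
   preorder, yields two sub-buffers that are `disjoint` in the legacy sense;
   the proof is automatic thanks to `loc_disjoint_gsub_buffer`. *)
let split_at_mid_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (mid:U32.t)
  : Lemma (requires (U32.v mid <= length b))
          (ensures  (disjoint (mgsub rel b 0ul mid)
                              (mgsub rel b mid (U32.sub (len b) mid))))
= ()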
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
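(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): a pointer is just a length-1 buffer, so dereferencing it in a
   specification is the same as reading its element at index 0. *)
let deref_is_get_zero (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (p:mpointer a rrel rel)
  : Lemma (deref h p == get h p 0)
= ()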
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test ``is_null b`` is compiled by KaRaMeL to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``).
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
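(* Illustrative sketch (hypothetical function, not part of the original
   interface): a typical read through the stateful API; `index` leaves the
   memory unchanged and returns the value predicted by the ghost `get`. *)
let read_first (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{0 < length b})
  : HST.Stack a (requires (fun h -> live h b))
                (ensures  (fun h y h' -> h == h' /\ y == get h b 0))
= index b 0ul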
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
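(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): the effect of the ghost update `g_upd` on the reflected buffer
   contents, obtained by instantiating `g_upd_seq_as_seq` above. *)
let g_upd_as_seq (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:nat{i < length b}) (v:a) (h:HS.mem{live h b})
  : Lemma (as_seq (g_upd b i v h) b == Seq.upd (as_seq h b) i v)
= g_upd_seq_as_seq b (Seq.upd (as_seq h b) i v) h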
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
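(* Illustrative sketch (hypothetical function, not part of the original
   interface): writing a cell with `upd` and reading it back with `index`.
   The caller must know that the write respects the buffer's preorder `rel`,
   hence the extra conjunct in the precondition. *)
let write_then_read (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{0 < length b}) (v:a)
  : HST.Stack a
    (requires (fun h -> live h b /\ rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
    (ensures  (fun _ y h' -> live h' b /\ y == v))
= upd b 0ul v;
  index b 0ul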
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val rrel_rel_always_compatible : rrel: LowStar.Monotonic.Buffer.srel a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | [] | LowStar.Monotonic.Buffer.rrel_rel_always_compatible | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | rrel: LowStar.Monotonic.Buffer.srel a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | {
"end_col": 99,
"end_line": 1922,
"start_col": 2,
"start_line": 1922
} |
|
Prims.GTot | val includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2) | val includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0
let includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 = | false | null | false | loc_includes (loc_buffer b1) (loc_buffer b2) /\ (g_is_null b1 <==> g_is_null b2) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.l_iff",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
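(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): the monoid laws above let clients freely reassociate and
   deduplicate unions of memory locations. *)
let loc_union_dedup (s1 s2: loc)
  : Lemma (loc_union s1 (loc_union s2 s1) == loc_union s1 s2)
= loc_union_comm s2 s1;
  loc_union_assoc s1 s1 s2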
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
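(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): the framing principle in action; a buffer disjoint from the
   modified locations keeps both its liveness and its contents. *)
let frame_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
  : Lemma (requires (loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                     live h b2 /\ modifies (loc_buffer b1) h h'))
          (ensures  (live h' b2 /\ as_seq h b2 == as_seq h' b2))
= modifies_buffer_elim b2 (loc_buffer b1) h h'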
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
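(* Illustrative sketch (hypothetical lemma, not part of the original
   interface): chaining two modifications of the same buffer; `modifies_trans`
   produces a union that `loc_union_idem` collapses back to `loc_buffer b`. *)
let modifies_buffer_twice (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h0 h1 /\ modifies (loc_buffer b) h1 h2))
          (ensures  (modifies (loc_buffer b) h0 h2))
= modifies_trans (loc_buffer b) h0 h1 (loc_buffer b) h2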
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
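(* A typical instantiation (sketch; the name below is hypothetical): the
   caller's footprint is a single buffer ``b`` allocated outside the fresh
   frame, so the frame's locations can be dropped after the pop.

   let fresh_frame_popped_buffer (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (h0 h1 h2 h3:HS.mem)
     : Lemma (requires (HS.fresh_frame h0 h1 /\
                        modifies (loc_union (loc_all_regions_from false (HS.get_tip h1))
                                            (loc_buffer b)) h1 h2 /\
                        HS.get_tip h2 == HS.get_tip h1 /\
                        HS.popped h2 h3))
             (ensures  (modifies (loc_buffer b) h0 h3))
     = modifies_fresh_frame_popped h0 h1 (loc_buffer b) h2 h3
*)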
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
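(* A common specialization (sketch; hypothetical name): if only the slice
   [from, to) of ``b`` changed, the footprint can be restated as
   ``loc_buffer_from_to b from to`` alone, taking ``l = loc_none`` above.

   let modifies_only_slice (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (from to:U32.t) (h h':HS.mem)
     : Lemma (requires (live h b /\ modifies (loc_buffer b) h h' /\
                        U32.v from <= U32.v to /\ U32.v to <= length b /\
                        Seq.slice (as_seq h b) 0 (U32.v from) `Seq.equal`
                          Seq.slice (as_seq h' b) 0 (U32.v from) /\
                        Seq.slice (as_seq h b) (U32.v to) (length b) `Seq.equal`
                          Seq.slice (as_seq h' b) (U32.v to) (length b)))
             (ensures  (modifies (loc_buffer_from_to b from to) h h'))
     = loc_union_loc_none_l (loc_buffer b);
       loc_union_loc_none_l (loc_buffer_from_to b from to);
       modifies_loc_buffer_from_to_intro b from to loc_none h h'
*)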
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, no matter how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
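(* A direct consequence (sketch; hypothetical name): a buffer not yet allocated
   in ``h`` is disjoint from any buffer live in ``h``.

   let unused_disjoint_live (#a1 #a2:Type0)
       (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
       (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h:HS.mem)
     : Lemma (requires (b1 `unused_in` h /\ live h b2))
             (ensures  (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
     = unused_in_loc_unused_in b1 h;
       live_loc_not_unused_in b2 h;
       unused_in_not_unused_in_disjoint_2
         (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)
         (loc_buffer b1) (loc_buffer b2) h
*)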
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val includes
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.includes | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> Prims.GTot Type0 | {
"end_col": 34,
"end_line": 1732,
"start_col": 2,
"start_line": 1731
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let spred (a:Type0) = Seq.seq a -> Type0 | let spred (a: Type0) = | false | null | false | Seq.seq a -> Type0 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers.
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
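(* For instance (sketch; hypothetical name): a buffer of length at least 2*n
   can be split into two sub-buffers, here carrying the parent's preorder.

   let halves (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n <= length b})
     : GTot (mbuffer a rrel rel & mbuffer a rrel rel)
     = (mgsub rel b 0ul n, mgsub rel b n n)
*)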
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
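(* A small sanity check (sketch; hypothetical name): the two unit laws let
   ``loc_none`` padding be dropped from a footprint.

   let loc_union_units (s:loc)
     : Lemma (loc_union loc_none (loc_union s loc_none) == s)
     = loc_union_loc_none_r s;
       loc_union_loc_none_l (loc_union s loc_none)
*)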
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
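(* For instance (sketch, hypothetical names): a function writing a buffer ``b``
   and a reference ``r`` in the caller's region would typically advertise the
   footprint

     loc_union (loc_buffer b) (loc_mreference r)

   while a function working in its own stack frame would claim
   ``loc_all_regions_from false (HS.get_tip h)`` for that frame. *)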
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
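(* For instance (sketch; hypothetical name): the two halves of a buffer are
   disjoint, here taking both sub-buffers at the parent's preorder.

   let halves_disjoint (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n <= length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul n))
                           (loc_buffer (mgsub rel b n n)))
     = loc_disjoint_gsub_buffer b 0ul n rel n n rel
*)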
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
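(* Typical use (sketch, hypothetical buffer names): a precondition for an
   operation on three buffers,

     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   which reduces at typechecking time to the three pairwise ``loc_disjoint``
   conjuncts. *)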
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
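(* The framing pattern this enables (sketch; hypothetical name): a write
   confined to ``b1`` preserves the liveness and contents of a disjoint ``b2``.

   let frame_disjoint_buffer (#a1 #a2:Type0)
       (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
       (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
     : Lemma (requires (live h b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h h'))
             (ensures  (live h' b2 /\ as_seq h' b2 == as_seq h b2))
     = modifies_buffer_elim b2 (loc_buffer b1) h h'
*)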
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// (vacuously) modified (and, in particular, so is the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
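(* Illustration (a hypothetical sketch, not part of this interface; the name
   ``live_across_write`` is ours): a footprint of ``loc_buffer b`` is
   address-liveness-insensitive, so any buffer that was live stays live across
   the write, with no disjointness hypothesis needed.

let live_across_write (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b:mbuffer a1 rrel1 rel1) (b':mbuffer a2 rrel2 rel2) (h h':HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h h' /\ live h b'))
          (ensures  (live h' b'))
  = address_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_buffer_weak (loc_buffer b) h h' b'
*)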
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
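(* Illustration (a hypothetical sketch, not part of this interface; the name
   ``chain`` is ours): chaining two steps with the same footprint. The explicit
   call to ``modifies_trans`` yields ``modifies (loc_union l l) h0 h2``, which
   ``loc_union_idem`` collapses to ``modifies l h0 h2``; ``modifies_trans_linear``
   above packages this common pattern with an SMT trigger.

let chain (l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
          (ensures  (modifies l h0 h2))
  = modifies_trans l h0 h1 l h2
*)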
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the SMT pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma frames the modifies clause across a fresh frame.
* One way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location.
* However, the way the library is set up, loc_region in any form cannot be
* considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
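(* Illustration (a hypothetical client, not part of this interface; the name
   ``push_pop_noop`` is ours): the smallest client of the stack discipline. A
   function that pushes a frame and pops it again has no net effect on the
   caller's locations; the lemmas above fire through their SMT patterns.

let push_pop_noop () : HST.Stack unit
  (requires (fun _ -> True))
  (ensures (fun h0 _ h1 -> modifies loc_none h0 h1))
  = HST.push_frame ();
    HST.pop_frame ()
*)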
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
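(* Illustration (a hypothetical client, not part of this interface; the names
   ``read_or_default`` and ``d`` are ours): the usual defensive pattern, which
   KaRaMeL compiles to a C null check before dereferencing.

let read_or_default (#a:Type0) (#rrel #rel:srel a)
  (d:a) (b:mpointer_or_null a rrel rel)
  : HST.Stack a (requires (fun h -> live h b))
                (ensures (fun h _ h' -> h == h'))
  = if is_null b then d else index b 0ul
*)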
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
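(* Illustration (a hypothetical wrapper, not part of this interface; the name
   ``tail`` is ours): the caller must establish the ``compatible_sub``
   obligation for the tail slice, e.g. because the preorder accepts all updates.

let tail (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
  : HST.Stack (mbuffer a rrel rel)
    (requires (fun h -> live h b /\ U32.v i <= length b /\
                        compatible_sub b i (len b `U32.sub` i) rel))
    (ensures (fun h b' h' -> h == h' /\ b' == mgsub rel b i (len b `U32.sub` i)))
  = moffset rel b i
*)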
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
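(* Illustration (a hypothetical one-liner, not part of this interface; the name
   ``head`` is ours): the precondition mirrors the C obligation that the access
   be within the object's lifetime and bounds.

let head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
                (ensures (fun h x h' -> h == h' /\ x == get h b 0))
  = index b 0ul
*)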
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
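(* Illustration (a hypothetical read-modify-write, not part of this interface;
   the name ``incr`` is ours): the quantified precondition discharges the
   preorder side-condition of ``upd`` by assuming ``rel`` accepts every update,
   as a trivial preorder does.

let incr (#rrel #rel:srel int) (b:mbuffer int rrel rel) (i:U32.t)
  : HST.Stack unit
    (requires (fun h -> live h b /\ U32.v i < length b /\
                        (forall (s1 s2:Seq.seq int). rel s1 s2)))
    (ensures (fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b))
  = let x = index b i in
    upd b i (x + 1)
*)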
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
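(* Illustration (a hypothetical client, not part of this interface; the name
   ``read_recallable`` is ours): a recallable buffer can be brought back into
   the scope of ``live`` before reading, without any liveness precondition.

let read_recallable (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{recallable b /\ 0 < length b})
  : HST.Stack a (requires (fun _ -> True))
                (ensures (fun h x h' -> h == h' /\ x == get h b 0))
  = recall b;
    index b 0ul
*)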
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*) | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val spred : a: Type0 -> Type | [] | LowStar.Monotonic.Buffer.spred | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type0 -> Type | {
"end_col": 47,
"end_line": 1968,
"start_col": 29,
"start_line": 1968
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloca_pre (len:U32.t) = U32.v len > 0 | let alloca_pre (len: U32.t) = | false | null | false | U32.v len > 0 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.UInt32.t",
"Prims.op_GreaterThan",
"FStar.UInt32.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by the
/// same inclusion.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
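(* Example (illustrative sketch, not part of the original interface):
   with the three patterns above, a composite footprint such as
   ``loc_union (loc_buffer b1) (loc_buffer b2)`` is known to include each
   of its components.  The lemma name `union_includes_snd` is hypothetical.

   let union_includes_snd (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b2))
     = loc_includes_union_l_buffer (loc_buffer b1) (loc_buffer b2) b2
*)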
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
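(* Example (illustrative sketch, not part of the original interface):
   disjointness from a union yields disjointness from each component, by
   combining `loc_includes_union_l` with `loc_disjoint_includes_r`.  The
   lemma name `disjoint_union_component` is hypothetical.

   let disjoint_union_component (l l1 l2:loc)
     : Lemma (requires (loc_disjoint l (loc_union l1 l2)))
             (ensures  (loc_disjoint l l1))
     = loc_includes_union_l l1 l2 l1;
       loc_disjoint_includes_r l (loc_union l1 l2) l1
*)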
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
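(* Example (illustrative sketch, not part of the original interface): a
   client precondition written with these shorthands.  The name
   `copy3_pre` is hypothetical.

   let copy3_pre (#a:Type0) (#rrel #rel:srel a)
     (b0 b1 b2:mbuffer a rrel rel) (h:HS.mem) : Type0
     = all_live h [buf b0; buf b1; buf b2] /\
       loc_pairwise_disjoint [loc_buffer b0; loc_buffer b1; loc_buffer b2]
*)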
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
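(* Example (illustrative sketch, not part of the original interface): the
   typical framing step enabled by `modifies_buffer_elim`.  Thanks to the
   patterns above it usually fires automatically; the explicit call is
   shown only for clarity.  The lemma name `frame_disjoint_buffer` is
   hypothetical.

   let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma
       (requires (live h0 b2 /\
                  loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
                  modifies (loc_buffer b1) h0 h1))
       (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = modifies_buffer_elim b2 (loc_buffer b1) h0 h1
*)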
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
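(* Example (illustrative sketch, not part of the original interface):
   chaining two heap transitions with `modifies_trans`.  The lemma name
   `chain_modifies` is hypothetical.

   let chain_modifies (l1 l2:loc) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
             (ensures  (modifies (loc_union l1 l2) h0 h2))
     = modifies_trans l1 h0 h1 l2 h2
*)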
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
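(* Example (illustrative sketch, not part of the original interface):
   reading through a pointer, i.e. a buffer of length 1.  The name
   `read_ptr` is hypothetical; the proof of the postcondition relies on
   the specification of `index` below.

   let read_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
     : HST.Stack a
       (requires (fun h -> live h p))
       (ensures  (fun h0 x h1 -> h0 == h1 /\ x == deref h0 p))
     = index p 0ul
*)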
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
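(* Example (illustrative sketch, not part of the original interface):
   taking the first two cells of a buffer as a sub-buffer.  The trivial
   preorder ``fun _ _ -> True`` and the name `first_two` are assumptions
   of this sketch; for this preorder the `compatible_sub` side condition
   holds, although discharging it may need the library's reveal lemmas.

   let first_two (b:mbuffer U32.t (fun _ _ -> True) (fun _ _ -> True))
     : HST.Stack (mbuffer U32.t (fun _ _ -> True) (fun _ _ -> True))
       (requires (fun h -> live h b /\ 2 <= length b))
       (ensures  (fun h0 b' h1 -> h0 == h1))
     = msub (fun _ _ -> True) b 0ul (Ghost.hide 2ul)
*)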
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
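(* Example (illustrative sketch, not part of the original interface):
   reading the first cell of a live, non-empty buffer.  The name `head`
   is hypothetical.

   let head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack a
       (requires (fun h -> live h b /\ 0 < length b))
       (ensures  (fun h0 x h1 -> h0 == h1 /\ x == Seq.index (as_seq h0 b) 0))
     = index b 0ul
*)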
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
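(* Example (illustrative sketch, not part of the original interface): a
   read-modify-write of the first cell, combining `index` and `upd`.  The
   trivial preorder ``fun _ _ -> True`` (under which the `rel` precondition
   of `upd` is vacuous) and the name `incr_head` are assumptions of this
   sketch.

   let incr_head (b:mbuffer U32.t (fun _ _ -> True) (fun _ _ -> True))
     : HST.Stack unit
       (requires (fun h -> live h b /\ 0 < length b))
       (ensures  (fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1))
     = let x = index b 0ul in
       upd b 0ul (U32.add_mod x 1ul)
*)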
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly.
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
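(* Example (illustrative sketch, not part of the original interface):
   witnessing the exact contents of a buffer whose preorder forces the
   contents to stay constant, so that any predicate on them is stable.
   The names `constant_rel` and `remember_contents` are hypothetical.

   let constant_rel (a:Type0) : srel a = fun s1 s2 -> s1 == s2

   let remember_contents (#a:Type0) (b:mbuffer a (constant_rel a) (constant_rel a)) (s:Seq.seq a)
     : HST.ST unit
       (requires (fun h -> live h b /\ as_seq h b == s))
       (ensures  (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` (fun s' -> s' == s)))
     = witness_p b (fun s' -> s' == s)
*)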
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be deallocated with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
 * In the return type, we try to give heap-independent postconditions (such as length)
 * in the refinement of the buffer type (for the usage pattern of top-level buffers),
 * while heap-dependent postconditions are provided in the ensures clause.
 *
 * One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
 * (e.g. Buffer, ImmutableBuffer, etc.).
 * If we don't duplicate, then the clients may face type inference issues (for preorders).
 *
 * So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
 * (such as alloc_post_mem_common etc.), rather than the specs directly.
 * Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
 * Will try that.
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
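(* Example (illustrative sketch, not part of the original interface):
   allocating a hand-managed buffer in the root region and freeing it
   right away.  The trivial preorder ``fun _ _ -> True`` and the name
   `alloc_and_free` are assumptions of this sketch.

   let alloc_and_free (_:unit)
     : HST.ST unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
     = let b = mmalloc #U32.t #(fun _ _ -> True) HS.root 0ul 8ul in
       free b
*)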
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``. | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloca_pre : len: FStar.UInt32.t -> Prims.bool | [] | LowStar.Monotonic.Buffer.alloca_pre | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | len: FStar.UInt32.t -> Prims.bool | {
"end_col": 49,
"end_line": 2204,
"start_col": 36,
"start_line": 2204
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)} | let lmbuffer (a: Type0) (rrel rel: srel a) (len: nat) = | false | null | false | b: mbuffer a rrel rel {length b == len /\ not (g_is_null b)} | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.nat",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
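(* Example (illustrative sketch, not part of this file): two common ways
   to instantiate `srel`.  The names `trivial_rel` and `constant_rel` are
   hypothetical; the Low* wrappers (e.g. LowStar.Buffer and
   LowStar.ImmutableBuffer) define their own equivalents.

   let trivial_rel (a:Type0) : srel a = fun _ _ -> True
   let constant_rel (a:Type0) : srel a = fun s1 s2 -> s1 == s2
*)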
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
  the `FStar.Classical.forall_intro_*` lemmas and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return an lseq and remove the `length_as_seq` lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
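(* Example (illustrative sketch, not part of this interface): ``as_seq`` and
   ``get`` are how functional specifications talk about buffer contents.
   A hypothetical ghost predicate stating that two buffers hold the same
   contents in a given memory could be written as:

   let same_contents (#a:Type0) (#rrel1 #rel1 #rrel2 #rel2:srel a)
     (h:HS.mem) (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
     : GTot Type0
     = as_seq h b1 == as_seq h b2

   A pointwise variant would instead quantify over indices ``i < length b1``
   and compare ``get h b1 i`` with ``get h b2 i``. *)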
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the
/// preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible (in the sense of ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
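(* Example (illustrative sketch; ``second_half`` is a hypothetical helper,
   not part of this interface): carving the second half out of a buffer of
   even length, reusing the parent preorder ``rel`` for the sub-buffer. By
   ``len_gsub`` its length is ``U32.v n``, and by ``as_seq_gsub`` its
   contents in any memory ``h`` are
   ``Seq.slice (as_seq h b) (U32.v n) (length b)``. Note that compatibility
   of ``rel`` with itself on this range (``compatible_sub``), needed e.g. by
   ``live_gsub``, is a separate proof obligation.

   let second_half (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n == length b})
     : GTot (mbuffer a rrel rel)
     = mgsub rel b n n
*)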
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// The modifies clause
/// ===================
///
/// The modifies clause for regions, references and buffers.
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
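(* Example (illustrative sketch; ``loc_union_swap`` is a hypothetical helper,
   not part of this interface): the monoid laws above are enough to reorder
   unions freely, e.g. to swap the two outermost operands of a three-way
   union.

   let loc_union_swap (l1 l2 l3:loc)
     : Lemma (loc_union l1 (loc_union l2 l3) == loc_union l2 (loc_union l1 l3))
     = loc_union_assoc l1 l2 l3;
       loc_union_comm l1 l2;
       loc_union_assoc l2 l1 l3
*)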
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
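(* Example (illustrative sketch; ``union_includes_buffer`` is a hypothetical
   helper, not part of this interface): ``loc_includes`` is typically used to
   show that a coarse footprint covers a finer one, so that a modifies clause
   can later be weakened (see ``modifies_loc_includes`` below). For instance,
   any union containing ``loc_buffer b`` includes ``loc_buffer b``:

   let union_includes_buffer (#a:Type0) (#rrel #rel:srel a)
     (l:loc) (b:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union l (loc_buffer b)) (loc_buffer b))
     = loc_includes_refl (loc_buffer b);
       loc_includes_union_l l (loc_buffer b) (loc_buffer b)
*)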
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory locations
/// of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the one corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
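(* Example (illustrative sketch; ``disjoint_halves`` is a hypothetical helper,
   not part of this interface): a common pattern is to split a buffer into two
   non-overlapping pieces and rely on ``loc_disjoint_gsub_buffer`` to obtain
   disjointness of their footprints.

   let disjoint_halves (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n == length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul n))
                           (loc_buffer (mgsub rel b n n)))
     = loc_disjoint_gsub_buffer b 0ul n rel n n rel
*)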
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
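(* Example (illustrative sketch; ``copy3`` is a hypothetical signature, not
   part of this interface): these list-based shorthands make specifications
   over several buffers concise, e.g.

   val copy3 (#a:Type0) (#rrel #rel:srel a)
     (dst src1 src2 src3:mbuffer a rrel rel)
     : HST.Stack unit
       (requires fun h ->
         all_live h [buf dst; buf src1; buf src2; buf src3] /\
         all_disjoint [loc_buffer dst; loc_buffer src1;
                       loc_buffer src2; loc_buffer src3])
       (ensures fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1)

   Note that ``modifies`` is only declared below; the sketch assumes the full
   interface is in scope. *)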
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
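(* Example (illustrative sketch; ``frame_b2`` is a hypothetical helper, not
   part of this interface): the elimination lemmas above are what make framing
   work in practice. If a computation modifies only ``loc_buffer b1`` and
   ``b2`` is disjoint from it, then ``b2``'s liveness and contents carry over.

   let frame_b2 (#a:Type0) (#rrel1 #rel1 #rrel2 #rel2:srel a)
     (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2) (h h':HS.mem)
     : Lemma
       (requires (live h b2 /\
                  loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
                  modifies (loc_buffer b1) h h'))
       (ensures  (live h' b2 /\ as_seq h' b2 == as_seq h b2))
     = modifies_buffer_elim b2 (loc_buffer b1) h h'
*)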
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
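(* Example (illustrative sketch; ``compose_steps`` is a hypothetical helper,
   not part of this interface): transitivity is how the modifies clauses of
   sequenced operations compose. If two successive steps each modify
   ``loc_buffer b``, so does the composite, thanks to idempotence of
   ``loc_union``.

   let compose_steps (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (h1 h2 h3:HS.mem)
     : Lemma
       (requires (modifies (loc_buffer b) h1 h2 /\ modifies (loc_buffer b) h2 h3))
       (ensures  (modifies (loc_buffer b) h1 h3))
     = modifies_trans (loc_buffer b) h1 h2 (loc_buffer b) h3;
       loc_union_idem (loc_buffer b)
*)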
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
   any other object, no matter how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
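(* Example (illustrative sketch; ``fresh_is_disjoint`` is a hypothetical
   helper, not part of this interface): ``fresh_loc`` is how allocation
   routines advertise that their result is disjoint from everything already
   allocated. Combined with ``unused_in_not_unused_in_disjoint_2`` above:

   let fresh_is_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (l:loc) (h0 h1:HS.mem)
     : Lemma
       (requires (fresh_loc (loc_buffer b) h0 h1 /\
                  loc_not_unused_in h0 `loc_includes` l))
       (ensures  (loc_disjoint (loc_buffer b) l))
     = loc_includes_refl (loc_buffer b);
       loc_includes_refl l;
       unused_in_not_unused_in_disjoint_2 (loc_buffer b) l (loc_buffer b) l h0
*)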
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
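(* Example (illustrative sketch; ``with_temp`` is hypothetical, and the
   stack allocation it alludes to is provided elsewhere in this library): a
   typical Stack function pushes a frame, works on temporaries allocated in
   that frame, and pops it; the lemmas above then shrink its modifies clause
   back to the caller-visible footprint.

   let with_temp (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack unit
       (requires fun h -> live h b)
       (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)
     = HST.push_frame ();
       (* allocate temporaries in the new frame, compute, write into b *)
       HST.pop_frame ()
*)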
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
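(* Example (illustrative sketch; ``swap`` is a hypothetical signature, not
   part of this interface; the preorder obligations required by the writes
   are elided for brevity): the pointer shorthands let one-cell
   specifications read naturally.

   val swap (#a:Type0) (#rrel #rel:srel a) (p1 p2:mpointer a rrel rel)
     : HST.Stack unit
       (requires fun h -> live h p1 /\ live h p2 /\ disjoint p1 p2)
       (ensures  fun h0 _ h1 ->
         modifies (loc_union (loc_buffer p1) (loc_buffer p2)) h0 h1 /\
         deref h1 p1 == deref h0 p2 /\ deref h1 p2 == deref h0 p1)
*)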
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
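(* Editorial sketch (illustrative, unverified): `index` leaves the memory unchanged,
   so a read-only function only needs liveness and bounds in its precondition.
   `triv` and `sum2` are made-up names for this example.

   let triv (a:Type0) : srel a = fun _ _ -> True

   let sum2 (b:mbuffer U32.t (triv U32.t) (triv U32.t))
     : HST.Stack U32.t
       (requires fun h -> live h b /\ length b >= 2)
       (ensures  fun h _ h' -> h == h')
     = let x = index b 0ul in
       let y = index b 1ul in
       U32.add_mod x y
*)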
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer ``b``'s contents in
/// heap ``h`` to correspond to the sequence ``s``.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
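(* Editorial sketch (illustrative, unverified): with the trivial preorder, the `rel`
   hypothesis of `upd` holds trivially, and the caller learns the usual
   `modifies (loc_buffer b)` footprint. `triv` and `set_head` are made-up names.

   let triv (a:Type0) : srel a = fun _ _ -> True

   let set_head (b:mbuffer bool (triv bool) (triv bool))
     : HST.Stack unit
       (requires fun h -> live h b /\ length b > 0)
       (ensures  fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b /\
                             as_seq h' b == Seq.upd (as_seq h b) 0 true)
     = upd b 0ul true
*)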
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
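(* Editorial sketch (illustrative, unverified): a typical use of `recall` is to
   re-establish liveness of a recallable (e.g. top-level, gc-allocated) buffer
   before reading it. `read_global` is a made-up name; `g` stands for some
   recallable one-element buffer assumed to exist.

   let read_global (#rrel #rel:srel bool)
                   (g:mbuffer bool rrel rel{recallable g /\ length g == 1})
     : HST.Stack bool
       (requires fun _ -> True)
       (ensures  fun h _ h' -> h == h')
     = recall g;
       index g 0ul
*)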
(*
* Begin: API for general witness and recall
 * Clients can witness predicates on the contents of the buffer, and later recall them,
 * provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifiers; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
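(* Editorial sketch (illustrative, unverified): the witness/recall pattern. Under a
   hypothetical "cells only grow" preorder `grows`, the predicate "every cell is at
   least 1" is stable, so it can be witnessed once and recalled later without
   re-checking. All names below are made up for this example.

   let grows : srel U32.t = fun s1 s2 ->
     Seq.length s1 == Seq.length s2 /\
     (forall (i:nat{i < Seq.length s1}). U32.v (Seq.index s1 i) <= U32.v (Seq.index s2 i))

   let all_ge_one : spred U32.t = fun s ->
     forall (i:nat{i < Seq.length s}). U32.v (Seq.index s i) >= 1

   let remember (b:mbuffer U32.t grows grows)
     : HST.ST unit
       (requires fun h -> all_ge_one (as_seq h b))
       (ensures  fun h _ h' -> h == h' /\ b `witnessed` all_ge_one)
     = witness_p b all_ge_one
*)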
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
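(* Editorial sketch (illustrative, unverified): `free` consumes the whole allocation
   unit, so afterwards only the `loc_addr_of_buffer b` footprint is reported as
   modified and `b` must not be used again. `read_then_free` is a made-up name.

   let read_then_free (#rrel #rel:srel bool) (b:mbuffer bool rrel rel)
     : HST.ST bool
       (requires fun h -> live h b /\ freeable b /\ length b > 0)
       (ensures  fun h _ h' -> modifies (loc_addr_of_buffer b) h h')
     = let x = index b 0ul in
       free b;
       x
*)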
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the shared pre/post spec functions
 * (such as alloc_post_mem_common etc.), rather than the specs at each use site directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lmbuffer : a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat
-> Type0 | [] | LowStar.Monotonic.Buffer.lmbuffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat
-> Type0 | {
"end_col": 62,
"end_line": 2088,
"start_col": 4,
"start_line": 2088
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | let lmbuffer_or_null (a: Type0) (rrel rel: srel a) (len: nat) (r: HS.rid) = | false | null | false | b: mbuffer a rrel rel {(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)} | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"Prims.nat",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.frameOf"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return an lseq and remove the length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
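(* Editorial sketch (illustrative, unverified): `mgsub` is ghost, so it typically
   appears in specifications rather than in executable code, e.g. to state that an
   operation only reads or writes the first half `mgsub rel b 0ul 4ul` of `b`.
   A ghost helper naming that first half might look as follows
   (`g_first_half` is a made-up name):

   let g_first_half (#a:Type0) (#rrel #rel:srel a)
                    (b:mbuffer a rrel rel{length b >= 4})
     : GTot (mbuffer a rrel rel)
     = mgsub rel b 0ul 4ul
*)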
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
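(* Editorial sketch (illustrative, unverified): compound footprints are built with
   `loc_union`; the unit and idempotence laws above let the SMT solver simplify
   them. `simplify_footprint` is a made-up name.

   let simplify_footprint (l1 l2:loc)
     : Lemma (loc_union (loc_union l1 loc_none) (loc_union l2 l2) == loc_union l1 l2)
     = ()
*)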
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` likewise includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region ``r`` of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// sets of memory locations that they respectively include.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
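(* Editorial sketch (illustrative, unverified): two non-overlapping slices of the
   same buffer have disjoint footprints; the lemma above discharges this via its
   SMT patterns. `triv` mirrors LowStar.Buffer.trivial_preorder and
   `halves_disjoint` is a made-up name.

   let triv (a:Type0) : srel a = fun _ _ -> True

   let halves_disjoint (#a:Type0) (b:mbuffer a (triv a) (triv a){length b == 8})
     : Lemma (loc_disjoint (loc_buffer (mgsub (triv a) b 0ul 4ul))
                           (loc_buffer (mgsub (triv a) b 4ul 4ul)))
     = ()
*)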
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
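(* Editorial sketch (illustrative): these list-based helpers are intended for
   preconditions over several buffers at once; a hypothetical three-buffer routine
   could require

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   which unfolds at typechecking time into the expected conjunction of `live`
   facts and pairwise `loc_disjoint` facts.
*)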
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
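(* Editorial sketch (illustrative, unverified): the framing principle in one step:
   anything disjoint from the modified footprint keeps its liveness and contents.
   `frame_example` is a made-up name.

   let frame_example (#a:Type0) (#rrel #rel:srel a)
                     (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
     : Lemma (requires loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                       live h b2 /\ modifies (loc_buffer b1) h h')
             (ensures  live h' b2 /\ as_seq h' b2 == as_seq h b2)
     = ()
*)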
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (and, in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
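(* Illustrative sketch (hypothetical client code, not part of this interface):
   two successive writes to the same buffer chain into a single modifies clause.
   With `modifies (loc_buffer b) h0 h1` and `modifies (loc_buffer b) h1 h2` in
   context, `modifies_trans_linear` (whose pattern fires automatically) yields
   `modifies (loc_buffer b) h0 h2`, since `loc_buffer b` includes itself:

     let h0 = HST.get () in
     upd b 0ul v0;                           // modifies (loc_buffer b) h0 h1
     let h1 = HST.get () in
     upd b 1ul v1;                           // modifies (loc_buffer b) h1 h2
     let h2 = HST.get () in
     assert (modifies (loc_buffer b) h0 h2)

   (assumes b is live, has length >= 2, and its preorder accepts both updates) *)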
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
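(* Illustrative sketch of the stack discipline this lemma captures (hypothetical
   client, using LowStar.Buffer.alloca and the push/pop operators of HyperStack.ST):

     HST.push_frame ();                           // h0 -> h1, fresh frame
     let tmp = LowStar.Buffer.alloca 0uy 64ul in  // lives in the fresh frame
     ...                                          // modifies only locations in that frame
     HST.pop_frame ()                             // h2 -> h3, frame popped

   Afterwards the caller can conclude `modifies loc_none h0 h3`: everything
   written in the popped frame drops out of the modifies clause. *)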
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
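(* E.g. (sketch): immediately after an allocation that returns a buffer b, with h0
   the pre-state and h1 the post-state, `fresh_loc (loc_buffer b) h0 h1` holds:
   b was unused in h0 (so `loc_unused_in h0` includes its address, by
   unused_in_loc_unused_in) and is live in h1 (so `loc_not_unused_in h1` includes
   it, by live_loc_not_unused_in). *)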
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
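(* E.g. (sketch): for p : mpointer a rrel rel, `deref h p` is the proof-level read
   of the single cell, while run-time code reads it with `index p 0ul` (compiled by
   KaRaMeL to `p[0]`, i.e. `*p`) and writes it with `upd p 0ul v`. *)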
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
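(* Illustrative sketch (hypothetical client): branching on nullity before reading,
   for b : mpointer_or_null a rrel rel with `live h b`:

     if is_null b
     then None                        // compiled to `b == NULL`
     else Some (index b 0ul)          // here length b == 1, so the read is safe
*)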
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
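(* Illustrative sketch (hypothetical client): carving cells [2, 2+4) out of a buffer
   b of length >= 6, keeping the same preorder rel, assuming `compatible_sub b 2ul 4ul rel`
   holds (it does, e.g., for the trivial preorder):

     let mid = msub rel b 2ul (Ghost.hide 4ul) in
     // KaRaMeL extracts this as `b + 2`; spec-wise, mid == mgsub rel b 2ul 4ul
*)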
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
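(* Illustrative sketch (hypothetical client): a read-modify-write on cell 0 of a
   buffer of 32-bit integers, assuming its preorder accepts the update (as the
   trivial preorder of LowStar.Buffer does):

     let x = index b 0ul in
     upd b 0ul (x `U32.add_mod` 1ul);
     // in the post-state h': as_seq h' b == Seq.upd (as_seq h b) 0 (x `U32.add_mod` 1ul)
*)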
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them,
* provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
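(* Illustrative sketch (hypothetical client): with an "immutable" preorder
   rel = fun s1 s2 -> s1 == s2, any predicate on the contents is trivially
   `stable_on` rel, so the contents observed at witnessing time can be recalled later:

     let contents_is (s:Seq.seq a) : spred a = fun s' -> s' == s

     ... witness_p b (contents_is (as_seq h0 b)) ...   // right after filling b
     ... recall_p b (contents_is s0) ...               // later: gives p (as_seq h b)

   (recall_p additionally needs b to be recallable, or already live) *)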
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lmbuffer_or_null : a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | [] | LowStar.Monotonic.Buffer.lmbuffer_or_null | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
a: Type0 ->
rrel: LowStar.Monotonic.Buffer.srel a ->
rel: LowStar.Monotonic.Buffer.srel a ->
len: Prims.nat ->
r: FStar.Monotonic.HyperHeap.rid
-> Type0 | {
"end_col": 85,
"end_line": 2102,
"start_col": 4,
"start_line": 2102
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | let gcmalloc_of_list_pre (#a: Type0) (r: HS.rid) (init: list a) = | false | null | false | HST.is_eternal_region r /\ normalize (FStar.List.Tot.length init <= UInt.max_int 32) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.list",
"Prims.l_and",
"FStar.HyperStack.ST.is_eternal_region",
"FStar.Pervasives.normalize",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.List.Tot.Base.length",
"FStar.UInt.max_int",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
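(* Two illustrative instances (sketches; LowStar.Buffer and LowStar.ImmutableBuffer
   define essentially these):

     let trivial_rel (a:Type0) : srel a = fun _ _ -> True            // contents may evolve freely
     let immutable_rel (a:Type0) : srel a = fun s1 s2 -> s1 == s2    // contents can never change
*)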
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
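(* For instance (sketch), the fully mutable buffers of LowStar.Buffer are the
   instance where both preorders are trivial:

     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)

   while LowStar.ImmutableBuffer picks preorders that freeze the contents. *)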
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
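(* E.g. (sketch): a functional spec phrased with `get`, here "b is sorted in h":

     let sorted (#rrel #rel:srel int) (h:HS.mem) (b:mbuffer int rrel rel) : GTot Type0 =
       forall (i:nat{i < length b}) (j:nat{j < length b}). i <= j ==> get h b i <= get h b j
*)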
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
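(* Worked instance (sketch): for a buffer b with length b >= 10,

     mgsub rel (mgsub rel b 2ul 8ul) 3ul 4ul == mgsub rel b 5ul 4ul

   taking offset 3 inside the sub-buffer that starts at offset 2 lands at
   offset 2 + 3 = 5 of the enclosing buffer b. *)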
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations of ``s1``
/// includes that of ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations of ``s1`` includes that of ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
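(* Example (illustrative sketch, not part of this interface): splitting a
   buffer of even length into two disjoint halves with ``mgsub``; the
   hypothetical preorder ``sub_rel`` is assumed to be a suitable sub-buffer
   preorder for both ranges.

   let split_halves_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (sub_rel:srel a)
     (half:U32.t{U32.v half + U32.v half == length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub sub_rel b 0ul half))
                           (loc_buffer (mgsub sub_rel b half half)))
     = loc_disjoint_gsub_buffer b 0ul half sub_rel half half sub_rel
*)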
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
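(* Example (illustrative sketch, not part of this interface): a hypothetical
   three-buffer precondition stated with the list-based helpers above.

   let three_way_pre (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel) : Type0
     = all_live h [buf b1; buf b2; buf b3] /\
       all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*)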
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
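(* Example (illustrative sketch, not part of this interface): framing a
   disjoint buffer across a ``modifies (loc_buffer b) h0 h1`` step; its
   liveness and contents are preserved by ``modifies_buffer_elim``.

   let frame_disjoint (#a:Type0) (#rrel #rel #rrel' #rel':srel a)
     (b:mbuffer a rrel rel) (b':mbuffer a rrel' rel') (h0 h1:HS.mem)
     : Lemma (requires (live h0 b' /\ loc_disjoint (loc_buffer b) (loc_buffer b') /\
                        modifies (loc_buffer b) h0 h1))
             (ensures  (live h1 b' /\ as_seq h1 b' == as_seq h0 b'))
     = modifies_buffer_elim b' (loc_buffer b) h0 h1
*)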
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
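(* Example (illustrative sketch, not part of this interface): two successive
   steps that each modify ``l`` collapse into a single one, via
   ``modifies_trans_linear`` (``l`` trivially includes itself).

   let chain (l:loc) (h1 h2 h3:HS.mem)
     : Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
             (ensures  (modifies l h1 h3))
     = modifies_trans_linear l l h1 h2 h3
*)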
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// More generally, any locations still unused in ``h`` can be removed
/// from modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
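(* Example (illustrative sketch, not part of this interface): anything fresh
   between ``h0`` and ``h1`` is disjoint from anything already allocated in
   ``h0``; this is how clients typically derive disjointness of a fresh
   allocation from pre-existing locations.

   let fresh_disjoint (l_new l_old:loc) (h0 h1:HS.mem)
     : Lemma (requires (fresh_loc l_new h0 h1 /\ l_old `loc_in` h0))
             (ensures  (loc_disjoint l_new l_old))
     = unused_in_not_unused_in_disjoint_2 l_new l_old l_new l_old h0
*)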
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
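(* Example (illustrative sketch, not part of this interface): reading the
   first element of a non-empty buffer; the bounds check is discharged by
   the precondition.

   let head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
                   (ensures  (fun h v h' -> h == h' /\ v == Seq.index (as_seq h b) 0))
     = index b 0ul
*)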
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
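(* Example (illustrative sketch, not part of this interface): writing to a
   buffer whose preorder is trivial, so the ``rel`` side condition of ``upd``
   is immediate; ``triv`` is a local stand-in for
   ``LowStar.Buffer.trivial_preorder``.

   let triv (a:Type0) : srel a = fun _ _ -> True

   let set_head (#a:Type0) (b:mbuffer a (triv a) (triv a)) (v:a)
     : HST.Stack unit
       (requires (fun h -> live h b /\ 0 < length b))
       (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
                               as_seq h1 b == Seq.upd (as_seq h0 b) 0 v))
     = upd b 0ul v
*)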
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
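(* Example (illustrative sketch, not part of this interface): witnessing a
   predicate that is stable under the buffer's preorder; the resulting pure
   ``witnessed`` token can later be turned back into a fact about the
   current contents with ``recall_p``.

   let remember (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     (p:spred a{p `stable_on` rel})
     : HST.ST unit (requires (fun h -> p (as_seq h b)))
                   (ensures  (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
     = witness_p b p
*)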
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be deallocated with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
 * So, if you change any of the pre- or postconditions, you should change the pre- and post-spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
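(* Example (illustrative sketch, not part of this interface): the usual
   stack-allocation pattern; the temporary buffer lives only between
   ``push_frame`` and ``pop_frame``, and the fresh-frame lemmas above let
   the caller conclude ``modifies loc_none``. ``triv`` is a hypothetical
   trivial preorder on byte sequences.

   let triv : srel UInt8.t = fun _ _ -> True

   let with_temp () : HST.Stack unit
     (requires (fun _ -> True))
     (ensures  (fun h0 _ h1 -> modifies loc_none h0 h1))
     = HST.push_frame ();
       let tmp = malloca #UInt8.t #triv 0uy 8ul in
       HST.pop_frame ()
*)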
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcmalloc_of_list_pre : r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a -> Prims.logical | [] | LowStar.Monotonic.Buffer.gcmalloc_of_list_pre | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a -> Prims.logical | {
"end_col": 59,
"end_line": 2253,
"start_col": 2,
"start_line": 2252
} |
|
Prims.Tot | val abuffer (region: HS.rid) (addr: nat) : Tot Type0 | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let abuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (abuffer' region addr) | val abuffer (region: HS.rid) (addr: nat) : Tot Type0
let abuffer (region: HS.rid) (addr: nat) : Tot Type0 = | false | null | false | G.erased (abuffer' region addr) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"FStar.Ghost.erased",
"LowStar.Monotonic.Buffer.abuffer'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
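(* Editor's note: a hypothetical helper (not part of the original interface),
   illustrating `get`: on a nonempty buffer, `get h b 0` is the first element
   of `as_seq h b`. *)
let get_head_example (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{0 < length b})
  :GTot a
  = get h b 0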
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
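(* Editor's note: a hypothetical example (not part of the original interface),
   carving the first half out of an 8-cell buffer with `mgsub`, reusing the
   buffer's current preorder `rel` for the sub-buffer. The only proof
   obligation is that the requested range fits within `b`. *)
let mgsub_prefix_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel{length b == 8})
  :GTot (mbuffer a rrel rel)
  = mgsub rel b 0ul 4ul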
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
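(* Editor's note: a hypothetical lemma (not part of the original interface),
   showing how the unit and idempotence laws collapse empty and duplicated
   contributions to a union of locations. *)
let example_loc_union_laws (s:loc)
  : Lemma (loc_union (loc_union s loc_none) s == s)
  = loc_union_loc_none_r s;
    loc_union_idem s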
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
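(* Editor's note: a hypothetical footprint (not part of the original
   interface): a whole stack frame `r` (together with the regions extending
   it) plus one extra buffer `b`, packaged as a single `loc`. *)
let example_frame_footprint (#a:Type0) (#rrel #rel:srel a)
  (r:HS.rid) (b:mbuffer a rrel rel)
  :GTot loc
  = loc_union (loc_all_regions_from false r) (loc_buffer b)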
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
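(* Editor's note: a hypothetical lemma (not part of the original interface):
   either operand of a union is included in that union; here, the right one. *)
let example_union_includes_right (s1 s2:loc)
  : Lemma (loc_includes (loc_union s1 s2) s2)
  = loc_includes_refl s2;
    loc_includes_union_l s1 s2 s2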
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
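(* Editor's note: a hypothetical specification shape (not part of the original
   interface), written with the helpers above: three buffers, all live and
   pairwise disjoint. *)
let example_three_buffers_pre (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel)
  :GTot Type0
  = all_live h [buf b1; buf b2; buf b3] /\
    all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]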
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
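(* Editor's note: a hypothetical framing lemma (not part of the original
   interface): writing through `b2` leaves a disjoint, live `b1` and its
   contents untouched. *)
let example_frame_disjoint_buffer
  (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h0 h1:HS.mem)
  :Lemma (requires (live h0 b1 /\
                    loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                    modifies (loc_buffer b2) h0 h1))
         (ensures  (live h1 b1 /\ as_seq h1 b1 == as_seq h0 b1))
  = modifies_buffer_elim b1 (loc_buffer b2) h0 h1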
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
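(* Editor's note: a hypothetical lemma (not part of the original interface),
   chaining two updates to different buffers into one modifies clause over the
   union of their footprints. *)
let example_modifies_two_steps (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
  :Lemma (requires (modifies (loc_buffer b1) h0 h1 /\
                    modifies (loc_buffer b2) h1 h2))
         (ensures  (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h2))
  = modifies_trans (loc_buffer b1) h0 h1 (loc_buffer b2) h2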
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
///                 if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
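(* Illustration (not part of the original interface; the name `example_deref_distinct_disjoint`
   is hypothetical and this sketch is not typechecked here): two pointers whose
   dereferenced values differ in a given heap must be disjoint, by the lemma above. *)
let example_deref_distinct_disjoint
  (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
  (p1:mpointer a rrel1 rel1) (p2:mpointer a rrel2 rel2) (h:HS.mem)
  :Lemma (requires (live h p1 /\ live h p2 /\ deref h p1 =!= deref h p2))
         (ensures  (disjoint p1 p2))
  = pointer_distinct_sel_disjoint p1 p2 h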
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
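(* Illustration (hypothetical wrapper, not in the original interface, not typechecked here):
   the run-time nullity test mirrors the ghost one, so an "is this optional pointer set?"
   check can be exposed without touching memory. *)
inline_for_extraction
let example_is_set (#a:Type0) (#rrel #rel:srel a) (p:mpointer_or_null a rrel rel)
  :HST.Stack bool (requires (fun h -> live h p))
                  (ensures  (fun h r h' -> h == h' /\ r == not (g_is_null p)))
  = not (is_null p)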
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
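(* Illustration (hypothetical, not typechecked here): dropping the first cell of a
   buffer with ``moffset``. Compatibility of the sub-buffer preorder is assumed in
   the precondition, since it does not hold for an arbitrary ``rel``. *)
inline_for_extraction
let example_tail (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :HST.Stack (mbuffer a rrel rel)
   (requires (fun h -> live h b /\ 1 <= length b /\
                    compatible_sub b 1ul (len b `U32.sub` 1ul) rel))
   (ensures  (fun h y h' -> h == h' /\ y == mgsub rel b 1ul (len b `U32.sub` 1ul)))
  = moffset rel b 1ul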
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
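(* Illustration (hypothetical, not typechecked here): reading the first cell of a
   non-empty buffer; the result is pinned down by the ghost shorthand ``get``. *)
inline_for_extraction
let example_read_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
               (ensures  (fun h r h' -> h == h' /\ r == get h b 0))
  = index b 0ul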
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
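(* Illustration (hypothetical, not typechecked here): writing the first cell. Since the
   buffer contents evolve according to ``rel``, the caller must show that the updated
   contents are reachable under ``rel``. *)
inline_for_extraction
let example_set_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
  :HST.Stack unit
   (requires (fun h -> live h b /\ 0 < length b /\
                    rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
   (ensures  (fun h _ h' -> modifies (loc_buffer b) h h' /\
                         live h' b /\
                         as_seq h' b == Seq.upd (as_seq h b) 0 v))
  = upd b 0ul v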
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
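(* Illustration (hypothetical, not typechecked here): the intended workflow is to
   ``witness_p`` a predicate that is stable under the buffer preorder and to
   ``recall_p`` it later; here the two steps are simply chained back to back. *)
let example_witness_then_recall (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:spred a)
  :HST.ST unit
   (requires (fun h0 -> live h0 b /\ p (as_seq h0 b) /\ p `stable_on` rel))
   (ensures  (fun h0 _ h1 -> h0 == h1 /\ p (as_seq h1 b)))
  = witness_p b p;
    recall_p b p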
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
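(* Illustration (hypothetical, not typechecked here): a one-cell, garbage-collected
   buffer allocated in the root region; the refinement records that it is recallable
   and lives in ``HS.root``. *)
let example_gc_singleton (#a:Type0) (#rrel:srel a) (init:a)
  :HST.ST (b:lmbuffer a rrel rrel 1{frameOf b == HS.root /\ recallable b})
   (requires (fun _ -> True))
   (ensures  (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 1 init)))
  = mgcmalloc HS.root init 1ul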
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
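(* Illustration (hypothetical, not typechecked here): a hand-managed buffer is
   allocated with ``mmalloc`` and immediately released with ``free``; the ``freeable``
   refinement on the result is what licenses the call to ``free``. *)
let example_malloc_then_free (#a:Type0) (#rrel:srel a) (init:a)
  :HST.ST unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let b = mmalloc #a #rrel HS.root init 8ul in
    free b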
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
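(* Illustration (hypothetical, not typechecked here): the usual stack discipline
   around ``malloca``: push a frame, allocate, use the buffer, and pop the frame,
   which reclaims the allocation. *)
let example_on_stack (#a:Type0) (#rrel:srel a) (init:a)
  :HST.Stack unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = HST.push_frame ();
    let b = malloca #a #rrel init 8ul in
    let _ = index b 0ul in
    HST.pop_frame ()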
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
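(* Illustration (hypothetical, not typechecked here): allocating a stack buffer from a
   literal list; the ``normalize`` preconditions are discharged by computing the list
   length at type-checking time. *)
let example_of_list (#rrel:srel U32.t) (_:unit)
  :HST.Stack unit (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = HST.push_frame ();
    let b = malloca_of_list #U32.t #rrel [1ul; 2ul; 3ul] in
    let _ = index b 2ul in
    HST.pop_frame ()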
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init
unfold let alloc_drgn_pre (h:HS.mem) (d:HST.drgn) (len:U32.t) = h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0
val mmalloc_drgn (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))
val mmalloc_drgn_mm (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ freeable b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))
val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a)
(d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h ->
alloc_drgn_pre h d len /\
live h src /\
U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(***** End allocation functions *****)
/// Derived operations
val blit (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(src:mbuffer a rrel1 rel1)
(idx_src:U32.t)
(dst:mbuffer a rrel2 rel2)
(idx_dst:U32.t)
(len:U32.t)
:HST.Stack unit (requires (fun h -> live h src /\ live h dst /\
U32.v idx_src + U32.v len <= length src /\
U32.v idx_dst + U32.v len <= length dst /\
(* TODO: remove the rhs part of this disjunction once patterns on loc_buffer_from_to are introduced *)
(loc_disjoint (loc_buffer_from_to src idx_src (idx_src `U32.add` len)) (loc_buffer_from_to dst idx_dst (idx_dst `U32.add` len)) \/ disjoint src dst) /\
rel2 (as_seq h dst)
(Seq.replace_subseq (as_seq h dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len)
(Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len)))))
(ensures (fun h _ h' -> modifies (loc_buffer dst) h h' /\
live h' dst /\
Seq.slice (as_seq h' dst) (U32.v idx_dst) (U32.v idx_dst + U32.v len) ==
Seq.slice (as_seq h src) (U32.v idx_src) (U32.v idx_src + U32.v len) /\
Seq.slice (as_seq h' dst) 0 (U32.v idx_dst) ==
Seq.slice (as_seq h dst) 0 (U32.v idx_dst) /\
Seq.slice (as_seq h' dst) (U32.v idx_dst + U32.v len) (length dst) ==
Seq.slice (as_seq h dst) (U32.v idx_dst + U32.v len) (length dst)))
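(* Illustration (hypothetical, not typechecked here): copying a prefix of ``src`` into
   ``dst`` with ``blit``; disjointness and the ``rel2`` compatibility of the overwritten
   contents are assumed in the precondition. *)
inline_for_extraction
let example_copy_prefix (#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
  (src:mbuffer a rrel1 rel1) (dst:mbuffer a rrel2 rel2) (l:U32.t)
  :HST.Stack unit
   (requires (fun h -> live h src /\ live h dst /\ disjoint src dst /\
                    U32.v l <= length src /\ U32.v l <= length dst /\
                    rel2 (as_seq h dst)
                         (Seq.replace_subseq (as_seq h dst) 0 (U32.v l)
                            (Seq.slice (as_seq h src) 0 (U32.v l)))))
   (ensures  (fun h _ h' -> modifies (loc_buffer dst) h h' /\
                         Seq.slice (as_seq h' dst) 0 (U32.v l) ==
                         Seq.slice (as_seq h src) 0 (U32.v l)))
  = blit src 0ul dst 0ul l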
val fill (#t:Type) (#rrel #rel: srel t)
(b: mbuffer t rrel rel)
(z:t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
live h b /\
U32.v len <= length b /\
rel (as_seq h b) (Seq.replace_subseq (as_seq h b) 0 (U32.v len) (Seq.create (U32.v len) z))
))
(ensures (fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\
live h1 b /\
Seq.slice (as_seq h1 b) 0 (U32.v len) == Seq.create (U32.v len) z /\
Seq.slice (as_seq h1 b) (U32.v len) (length b) == Seq.slice (as_seq h0 b) (U32.v len) (length b)
))
/// Type class instantiation for compositionality with other kinds of memory locations than regions, references or buffers (just in case).
/// No usage pattern has been found yet.
module MG = FStar.ModifiesGen
val abuffer' (region: HS.rid) (addr: nat) : Tot Type0 | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val abuffer (region: HS.rid) (addr: nat) : Tot Type0 | [] | LowStar.Monotonic.Buffer.abuffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | region: FStar.Monotonic.HyperHeap.rid -> addr: Prims.nat -> Type0 | {
"end_col": 86,
"end_line": 2356,
"start_col": 55,
"start_line": 2356
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | let alloca_of_list_pre (#a: Type0) (init: list a) = | false | null | false | normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"Prims.list",
"Prims.l_and",
"FStar.Pervasives.normalize",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.List.Tot.Base.length",
"Prims.op_LessThanOrEqual",
"FStar.UInt.max_int",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
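(* Illustration (hypothetical, not typechecked here): instantiating the lemma above,
   the contents of the second half of an 8-cell buffer are the corresponding slice of
   the whole buffer. *)
let example_second_half_as_seq (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b == 8})
  :Lemma (as_seq h (mgsub rel b 4ul 4ul) == Seq.slice (as_seq h b) 4 8)
  = as_seq_gsub h b 4ul 4ul rel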
/// Two live non-null buffers having the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
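(* Illustration (hypothetical, not typechecked here): ``loc_union`` behaves like a
   set-theoretic union with respect to inclusion, so a union includes each of its
   operands; the proof uses the introduction lemma above. *)
let example_union_includes_operands (s1 s2:loc)
  :Lemma (loc_includes (loc_union s1 s2) s1 /\ loc_includes (loc_union s1 s2) s2)
  = loc_includes_union_l s1 s2 s1;
    loc_includes_union_l s1 s2 s2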
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are also related by inclusion.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the set of memory locations corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the set of
/// memory locations corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
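(* Illustrative sketch (not part of this interface; the function and its name
   are hypothetical): these list-based helpers are typically used to state the
   footprint of a function that takes several buffers at once, e.g.

     val sum3 (#rrel #rel:srel UInt32.t)
       (b1 b2 b3:mbuffer UInt32.t rrel rel)
       : HST.Stack unit
         (requires fun h ->
            all_live h [buf b1; buf b2; buf b3] /\
            all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3])
         (ensures fun h0 _ h1 ->
            modifies (loc_union_l [loc_buffer b1; loc_buffer b2; loc_buffer b3]) h0 h1)
*)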
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
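(* Illustrative sketch (hypothetical names, not part of this interface): a
   typical stateful signature uses ``modifies`` in its postcondition to
   delimit the footprint of the operation; anything disjoint from that
   footprint is preserved, by the elimination lemmas below.

     val set_first (#rrel #rel:srel UInt32.t) (b:mbuffer UInt32.t rrel rel) (v:UInt32.t)
       : HST.Stack unit
         (requires fun h ->
            live h b /\ 0 < length b /\
            rel (as_seq h b) (Seq.upd (as_seq h b) 0 v))
         (ensures fun h0 _ h1 ->
            live h1 b /\
            modifies (loc_buffer b) h0 h1 /\
            Seq.index (as_seq h1 b) 0 == v)
*)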
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
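(* Illustrative sketch (hypothetical): this elimination lemma is what makes
   framing work in client code. If a call ensures ``modifies (loc_buffer b1) h0 h1``
   and ``b2`` is live and disjoint from ``b1``, then ``as_seq h1 b2 == as_seq h0 b2``
   follows via the patterns above, e.g.

     let h0 = HST.get () in
     f b1;                          // ensures modifies (loc_buffer b1) h0 h1
     let h1 = HST.get () in
     assert (as_seq h1 b2 == as_seq h0 b2)
*)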
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for any
/// set ``s`` of memory locations (and, in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
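(* Illustrative sketch (hypothetical, using ``upd`` declared further below and
   assuming a preorder that accepts both updates): two successive writes to the
   same buffer are summarized by a single ``modifies (loc_buffer b)`` clause,
   thanks to transitivity, usually discharged automatically by the pattern on
   modifies_trans_linear.

     let two_writes (#rrel #rel:srel bool) (b:mbuffer bool rrel rel)
       : HST.Stack unit
         (requires fun h ->
            live h b /\ length b == 2 /\
            (forall (s1 s2:Seq.seq bool). rel s1 s2))    // preorder accepts any update
         (ensures fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)
       = upd b 0ul true;
         upd b 1ul false
*)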
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removed the SMT pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
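(* Illustrative sketch (hypothetical; preorder implicits elided): this lemma is
   what lets a function that allocates only on its own stack frame (with
   ``malloca``, declared further below) advertise an empty footprint.

     let with_tmp () : HST.Stack unit
         (requires fun _ -> True)
         (ensures fun h0 _ h1 -> modifies loc_none h0 h1)
       = HST.push_frame ();
         let tmp = malloca 0uy 8ul in   // lives only in the fresh frame
         // ... work with tmp ...
         HST.pop_frame ()
*)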
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is not
/// live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test ``is_null b`` is compiled by KaRaMeL to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
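(* Illustrative sketch (hypothetical, assuming a preorder that accepts the
   update): reading a cell with ``index`` and writing it back with ``upd``.

     let incr_first (#rrel #rel:srel UInt32.t) (b:mbuffer UInt32.t rrel rel)
       : HST.Stack unit
         (requires fun h ->
            live h b /\ 0 < length b /\
            U32.v (Seq.index (as_seq h b) 0) < 100 /\    // no overflow on the increment
            (forall (s1 s2:Seq.seq UInt32.t). rel s1 s2))
         (ensures fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1)
       = let x = index b 0ul in
         upd b 0ul (U32.add x 1ul)
*)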
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
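(* Illustrative sketch (hypothetical): with a preorder that forbids any change
   to the contents, every content predicate is trivially stable, so it can be
   witnessed once and recalled later without re-establishing it.

     let constant_rel : srel UInt32.t = fun s1 s2 -> s1 == s2

     let first_is_zero : spred UInt32.t =
       fun s -> Seq.length s > 0 /\ Seq.index s 0 == 0ul

   After checking ``first_is_zero (as_seq h b)`` once for a recallable buffer
   ``b`` whose preorder is ``constant_rel``:

     witness_p b first_is_zero;
     // ... arbitrary code ...
     recall_p b first_is_zero    // recovers first_is_zero (as_seq h' b)
*)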
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
 * One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
 * (e.g. Buffer, ImmutableBuffer, etc.).
 * If we don't duplicate them, clients may face type inference issues (for preorders).
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
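(* Illustrative sketch (hypothetical; preorder implicits elided): a buffer
   obtained from ``mgcmalloc`` is ``recallable``, so a client can re-establish
   its liveness at any point with ``recall`` (declared above) instead of
   threading a ``live`` hypothesis through every intermediate spec:

     let b = mgcmalloc HS.root 0ul 64ul in
     // ... arbitrary code; b cannot be freed, it is eternal ...
     recall b;                     // now: live h b
     let x = index b 0ul in
     ...
*)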
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
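(* Illustrative sketch (hypothetical; preorder implicits elided): the usual
   lifecycle of a hand-managed buffer, allocated with ``mmalloc`` and released
   with ``free``:

     let b = mmalloc HS.root 0uy 32ul in   // freeable b, frameOf b == HS.root
     // ... use b ...
     free b                                // requires live h b /\ freeable b
*)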
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer. | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloca_of_list_pre : init: Prims.list a -> Prims.logical | [] | LowStar.Monotonic.Buffer.alloca_of_list_pre | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | init: Prims.list a -> Prims.logical | {
"end_col": 59,
"end_line": 2240,
"start_col": 2,
"start_line": 2239
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloc_drgn_pre (h:HS.mem) (d:HST.drgn) (len:U32.t) = h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0 | let alloc_drgn_pre (h: HS.mem) (d: HST.drgn) (len: U32.t) = | false | null | false | h `HS.live_region` (HST.rid_of_drgn d) /\ U32.v len > 0 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.drgn",
"FStar.UInt32.t",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.HyperStack.ST.rid_of_drgn",
"Prims.op_GreaterThan",
"FStar.UInt32.v",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
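(* Illustrative sketch (hypothetical): a concrete preorder on sequences of
   natural numbers in which each cell may only grow over time. A buffer
   carrying this `rel` can only ever be updated to pointwise-larger contents.

     let grows : srel nat =
       fun s1 s2 ->
         Seq.length s1 == Seq.length s2 /\
         (forall (i:nat{i < Seq.length s1}). Seq.index s1 i <= Seq.index s2 i)
*)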
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible (see ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
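(* Editor's illustrative sketch, not part of the original interface: reading an
   element of a sub-buffer is the same as reading the corresponding element of
   the enclosing buffer, shifted by the offset. The name `example_index_gsub`
   is ours; the proof is expected to follow from `as_seq_gsub` together with
   the slice lemmas of `FStar.Seq`, but may need extra hints. *)
let example_index_gsub (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i len':U32.t) (k:nat)
  : Lemma (requires (U32.v i + U32.v len' <= length b /\ k < U32.v len'))
          (ensures  (Seq.index (as_seq h (mgsub rel b i len')) k ==
                     Seq.index (as_seq h b) (U32.v i + k)))
  = as_seq_gsub h b i len' rel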
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
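(* Editor's illustrative sketch, not part of the original interface: the union
   of two location sets includes each of its components. The name
   `example_loc_union_includes_left` is ours; the proof is expected to go
   through with the two lemma calls below. *)
let example_loc_union_includes_left (l1 l2:loc)
  : Lemma (loc_includes (loc_union l1 l2) l1)
  = loc_includes_refl l1;
    loc_includes_union_l l1 l2 l1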
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are likewise related by inclusion
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
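(* Editor's illustrative sketch, not part of the original interface: splitting
   a buffer at index `i` yields two sub-buffers whose footprints are disjoint.
   The name `example_split_footprints_disjoint` is ours; the proof is expected
   to follow from `loc_disjoint_gsub_buffer`. *)
let example_split_footprints_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t)
  : Lemma (requires (U32.v i <= length b))
          (ensures  (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                                  (loc_buffer (mgsub rel b i (U32.sub (len b) i)))))
  = loc_disjoint_gsub_buffer b 0ul i rel i (U32.sub (len b) i) rel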
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
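(* Editor's illustrative sketch, not part of the original interface: since
   `all_disjoint` reduces at typechecking time to a conjunction of pairwise
   `loc_disjoint` facts, each individual disjointness can be read off directly.
   The name `example_all_disjoint_elim` is ours. *)
let example_all_disjoint_elim (l1 l2 l3:loc)
  : Lemma (requires (all_disjoint [l1; l2; l3]))
          (ensures  (loc_disjoint l1 l3))
  = ()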
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
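(* Editor's illustrative sketch, not part of the original interface: a buffer
   disjoint from both components of a union footprint is preserved across a
   `modifies` of that union. The name `example_frame_across_union` is ours;
   the proof is expected to follow from `loc_disjoint_union_r` and
   `modifies_buffer_elim`. *)
let example_frame_across_union (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l1 l2:loc) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b /\
                     loc_disjoint (loc_buffer b) l1 /\
                     loc_disjoint (loc_buffer b) l2 /\
                     modifies (loc_union l1 l2) h0 h1))
          (ensures  (live h1 b /\ as_seq h0 b == as_seq h1 b))
  = loc_disjoint_union_r (loc_buffer b) l1 l2;
    modifies_buffer_elim b (loc_union l1 l2) h0 h1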
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// (vacuously) modified; in particular, the empty set ``loc_none``.
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
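(* Editor's illustrative sketch, not part of the original interface: a
   `modifies loc_none` clause can be weakened to any footprint, since every
   location set includes `loc_none`. The name `example_weaken_modifies_none`
   is ours. *)
let example_weaken_modifies_none (l:loc) (h0 h1:HS.mem)
  : Lemma (requires (modifies loc_none h0 h1))
          (ensures  (modifies l h0 h1))
  = loc_includes_none l;
    modifies_loc_includes l h0 h1 loc_none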
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
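(* Editor's illustrative sketch, not part of the original interface: writing to
   a buffer `b` (footprint `loc_buffer b`) cannot deallocate any other live
   buffer, because buffer footprints are address-liveness-insensitive. The
   name `example_write_preserves_liveness` is ours. *)
let example_write_preserves_liveness
  (#a1:Type0) (#rrel1 #rel1:srel a1) (#a2:Type0) (#rrel2 #rel2:srel a2)
  (b:mbuffer a1 rrel1 rel1) (x:mbuffer a2 rrel2 rel2) (h0 h1:HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h0 h1 /\ live h0 x))
          (ensures  (live h1 x))
  = address_liveness_insensitive_buffer b;
    modifies_liveness_insensitive_buffer_weak (loc_buffer b) h0 h1 x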
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
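(* Editor's illustrative sketch, not part of the original interface: two
   successive `modifies` clauses with the same footprint compose into one,
   using transitivity and idempotence of `loc_union`. The name
   `example_modifies_trans_same_footprint` is ours. *)
let example_modifies_trans_same_footprint (l:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l h0 h1 /\ modifies l h1 h2))
          (ensures  (modifies l h0 h2))
  = modifies_trans l h0 h1 l h2;
    loc_union_idem l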
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly,
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a just-allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
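(* Editor's illustrative sketch, not part of the original interface: a location
   that is fresh between `h0` and `h1` is disjoint from anything already
   allocated in `h0`. The name `example_fresh_loc_disjoint` is ours; the proof
   is expected to follow from `unused_in_not_unused_in_disjoint_2`. *)
let example_fresh_loc_disjoint (l_fresh l_old:loc) (h0 h1:HS.mem)
  : Lemma (requires (fresh_loc l_fresh h0 h1 /\
                     loc_not_unused_in h0 `loc_includes` l_old))
          (ensures  (loc_disjoint l_fresh l_old))
  = loc_includes_refl (loc_unused_in h0);
    loc_includes_refl (loc_not_unused_in h0);
    unused_in_not_unused_in_disjoint_2 (loc_unused_in h0) (loc_not_unused_in h0) l_fresh l_old h0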
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
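(*
 * Illustrative sketch (names hypothetical): with `deref`, specifications
 * about single-cell buffers read naturally; for instance a postcondition
 *
 *   ensures (fun h0 _ h1 -> deref h1 p == deref h0 p)
 *
 * states that the cell pointed to by `p` is left unchanged.
 *)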
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
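(*
 * Illustrative sketch (not part of this interface): a thin wrapper around
 * `index` whose postcondition is restated with the `get` shorthand.
 *
 *   let read_cell (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
 *     : HST.Stack a (requires fun h -> live h b /\ U32.v i < length b)
 *                   (ensures  fun h0 x h1 -> h0 == h1 /\ x == get h0 b (U32.v i))
 *   = index b i
 *)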
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
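(*
 * Illustrative sketch (not part of this interface): writing a single cell.
 * The `rel ...` hypothesis is the price of the preorder; with a preorder
 * that accepts every update (e.g. the trivial one of LowStar.Buffer) it is
 * immediate.
 *
 *   let set_cell (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t) (v:a)
 *     : HST.Stack unit
 *       (requires fun h -> live h b /\ U32.v i < length b /\
 *                          rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v))
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
 *                                as_seq h1 b == Seq.upd (as_seq h0 b) (U32.v i) v)
 *   = upd b i v
 *)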
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier, you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
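(*
 * Illustrative usage sketch (names hypothetical): for a predicate p that is
 * stable w.r.t. rel, a client typically writes
 *
 *   witness_p b p;   (* needs: p (as_seq h b) and p `stable_on` rel *)
 *   ...              (* arbitrary code, keeping b recallable or live *)
 *   recall_p b p     (* recovers: live h b /\ p (as_seq h b) *)
 *
 * e.g. with an "immutable after initialization" preorder, p can pin down the
 * exact contents of b once and for all.
 *)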
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed using ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
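(*
 * Illustrative sketch (not part of this interface; `triv` stands for some
 * preorder accepting every update, e.g. the trivial preorder of LowStar.Buffer):
 *
 *   let roundtrip (r:HS.rid{HST.is_eternal_region r}) : HST.ST unit
 *     (requires fun _ -> True) (ensures fun _ _ _ -> True)
 *   = let b = mmalloc #U32.t #triv r 0ul 8ul in   (* freeable, length 8 *)
 *     upd b 0ul 42ul;
 *     free b
 *)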
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
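(*
 * Illustrative sketch (not part of this interface; `triv` as above denotes a
 * preorder accepting every update): stack allocation happens inside a frame
 * pushed by the caller and dies with that frame.
 *
 *   let sum_of_two () : HST.Stack U32.t
 *     (requires fun _ -> True) (ensures fun _ _ _ -> True)
 *   = HST.push_frame ();
 *     let b = malloca #U32.t #triv 1ul 2ul in           (* two cells, both 1ul *)
 *     let x = U32.add_mod (index b 0ul) (index b 1ul) in
 *     HST.pop_frame ();
 *     x
 *)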
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
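(*
 * Illustrative sketch (`triv` as above): initializing a small stack buffer
 * from a literal list, which must normalize to a constant at typechecking time.
 *
 *   HST.push_frame ();
 *   let lut = malloca_of_list #U32.t #triv [ 1ul; 2ul; 4ul; 8ul ] in
 *   (* length lut == 4 and as_seq h lut == Seq.seq_of_list [1ul; 2ul; 4ul; 8ul] *)
 *   ...
 *   HST.pop_frame ()
 *)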
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
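(*
 * Illustrative sketch (names hypothetical; `triv` as above): `mgcmalloc_of_list`
 * is the usual way to build top-level, recallable constant tables.
 *
 *   let table : b:lmbuffer U32.t triv triv 3 { recallable b } =
 *     mgcmalloc_of_list HS.root [ 0ul; 1ul; 2ul ]
 *)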
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloc_drgn_pre : h: FStar.Monotonic.HyperStack.mem -> d: FStar.HyperStack.ST.drgn -> len: FStar.UInt32.t
-> Prims.logical | [] | LowStar.Monotonic.Buffer.alloc_drgn_pre | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> d: FStar.HyperStack.ST.drgn -> len: FStar.UInt32.t
-> Prims.logical | {
"end_col": 119,
"end_line": 2275,
"start_col": 64,
"start_line": 2275
} |
|
FStar.Pervasives.Lemma | val live_not_unused_in' (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b | val live_not_unused_in' (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
let live_not_unused_in' (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)] = | false | null | true | live_not_unused_in h b | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.live_not_unused_in",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.unused_in",
"Prims.squash",
"Prims.l_False",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_not_unused_in' (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)] | [] | LowStar.Monotonic.Buffer.live_not_unused_in' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires LowStar.Monotonic.Buffer.live h b /\ LowStar.Monotonic.Buffer.unused_in b h)
(ensures Prims.l_False)
[SMTPat (LowStar.Monotonic.Buffer.live h b); SMTPat (LowStar.Monotonic.Buffer.unused_in b h)] | {
"end_col": 26,
"end_line": 153,
"start_col": 4,
"start_line": 153
} |
Prims.GTot | val loc_region_only (preserve_liveness: bool) (r: HS.rid) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r) | val loc_region_only (preserve_liveness: bool) (r: HS.rid) : GTot loc
let loc_region_only (preserve_liveness: bool) (r: HS.rid) : GTot loc = | false | null | false | loc_regions preserve_liveness (Set.singleton r) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.loc_regions",
"FStar.Set.singleton",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
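(*
 * For instance (illustrative): `mgsub rel b 2ul 4ul` is the spec-level view of
 * the 4-cell window of `b` starting at offset 2; by `as_seq_gsub` below,
 * `as_seq h (mgsub rel b 2ul 4ul)` is `Seq.slice (as_seq h b) 2 6`, provided
 * `2 + 4 <= length b`.
 *)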
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
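(*
 * Illustrative sketch (names hypothetical): locations are combined with
 * `loc_union` in modifies clauses; a function writing to two buffers
 * typically advertises
 *
 *   ensures (fun h0 _ h1 -> modifies (loc_buffer b1 `loc_union` loc_buffer b2) h0 h1)
 *
 * and any location disjoint from that union is then known to be preserved.
 *)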
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_region_only (preserve_liveness: bool) (r: HS.rid) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_region_only | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | preserve_liveness: Prims.bool -> r: FStar.Monotonic.HyperHeap.rid
-> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 49,
"end_line": 577,
"start_col": 2,
"start_line": 577
} |
Prims.GTot | val loc_all_regions_from (preserve_liveness: bool) (r: HS.rid) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r)) | val loc_all_regions_from (preserve_liveness: bool) (r: HS.rid) : GTot loc
let loc_all_regions_from (preserve_liveness: bool) (r: HS.rid) : GTot loc = | false | null | false | loc_regions preserve_liveness (HS.mod_set (Set.singleton r)) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.loc_regions",
"FStar.Monotonic.HyperHeap.mod_set",
"FStar.Set.singleton",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
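(* Illustrative sketch, not part of the original file: the degenerate case is
   the trivial preorder, which relates any two sequences and is therefore
   compatible with any slice of any length:
     let trivial_rel (a:Type0) : srel a = fun _ _ -> True
   LowStar.Buffer uses (roughly) this preorder for plain, unconstrained buffers. *)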
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
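(* Illustrative sketch, not part of the original interface: `get` is typically
   used to state pointwise facts about buffer contents, e.g. a hypothetical
   ghost predicate saying that two non-empty buffers agree at index 0:
     let agree_at_zero (#a:Type0) (#rrel #rel:srel a) (h:HS.mem)
       (b1 b2:mbuffer a rrel rel)
       : Ghost Type0
         (requires (0 < length b1 /\ 0 < length b2))
         (ensures (fun _ -> True))
       = get h b1 0 == get h b2 0
*)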
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_all_regions_from (preserve_liveness: bool) (r: HS.rid) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_all_regions_from | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | preserve_liveness: Prims.bool -> r: FStar.Monotonic.HyperHeap.rid
-> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 62,
"end_line": 590,
"start_col": 2,
"start_line": 590
} |
Prims.Tot | val all_disjoint (l: list loc) : Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l | val all_disjoint (l: list loc) : Type0
let all_disjoint (l: list loc) : Type0 = | false | null | false | BigOps.pairwise_and loc_disjoint l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"Prims.list",
"LowStar.Monotonic.Buffer.loc",
"FStar.BigOps.pairwise_and",
"LowStar.Monotonic.Buffer.loc_disjoint"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
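(* Illustrative sketch, not part of this interface: concrete buffer types are
   obtained by fixing the two preorders. For instance, LowStar.Buffer defines
   (roughly)
     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     type buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)
   so plain buffers place no constraint on how their contents may evolve. *)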
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
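(* Illustrative sketch (hypothetical helper, not part of this interface):
   carving out the first two cells of a sufficiently long buffer, keeping the
   parent's preorder so that compatibility is immediate:
     let first_two (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : Ghost (mbuffer a rrel rel)
         (requires (2 <= length b))
         (ensures (fun _ -> True))
       = mgsub rel b 0ul 2ul
*)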
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
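(* Illustrative sketch (hypothetical client idiom, not part of this interface):
   footprints are assembled by unioning the locations an operation may touch,
   e.g. for a function working on two buffers:
     let footprint (#a:Type0) (#rrel #rel:srel a) (b1 b2:mbuffer a rrel rel)
       : GTot loc
       = loc_union (loc_buffer b1) (loc_buffer b2)
   By `loc_union_comm` and `loc_union_assoc`, the order of the unions does not
   matter. *)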
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
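(* Illustrative note and sketch, not part of this interface:
   `loc_addr_of_buffer b` covers the whole allocation unit of `b` (its region
   and address), so it includes `loc_buffer b` and is the footprint one would
   expect for deallocating `b`, e.g. a hypothetical postcondition
     modifies (loc_addr_of_buffer b) h0 h1
   (with `modifies` introduced later in this interface). *)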
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
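(* Illustrative sketch (hypothetical abbreviation, not part of this interface):
   `loc_region_only` covers just `r` itself, whereas `loc_all_regions_from`
   also covers every region allocated below `r`, such as the stack frames
   pushed by callees:
     let callee_footprint (r:HS.rid) : GTot loc
       = loc_all_regions_from false r
*)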
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
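(* Illustrative sketch (hypothetical lemma, provable from the lemmas above; not
   part of this interface): a union includes each of its operands, e.g.
     let union_includes_left (l1 l2:loc)
       : Lemma (loc_includes (loc_union l1 l2) l1)
       = loc_includes_union_l l1 l2 l1
*)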
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
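(* Illustrative sketch, not part of this interface: disjointness from a
   union propagates to each operand (here the left one), by combining
   `loc_includes_union_l` with `loc_disjoint_includes`. The name
   `example_disjoint_union_left` is ours. *)
let example_disjoint_union_left (s s1 s2:loc)
  : Lemma (requires (loc_disjoint s (loc_union s1 s2)))
          (ensures (loc_disjoint s s1))
  = loc_includes_refl s;
    loc_includes_refl s1;
    loc_includes_union_l s1 s2 s1;
    loc_disjoint_includes s (loc_union s1 s2) s s1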
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
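(* Illustrative sketch, not part of this interface: stating that two
   buffers are both live, via `all_live` and the `buf` constructor.
   The name `example_both_live` is ours. *)
let example_both_live (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b1 b2:mbuffer a rrel rel) : GTot Type0
  = all_live h [buf b1; buf b2]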
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val all_disjoint (l: list loc) : Type0 | [] | LowStar.Monotonic.Buffer.all_disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: Prims.list LowStar.Monotonic.Buffer.loc -> Type0 | {
"end_col": 36,
"end_line": 1005,
"start_col": 2,
"start_line": 1005
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l | let loc_not_in (l: loc) (h: HS.mem) = | false | null | false | (loc_unused_in h) `loc_includes` l | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
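(* Illustrative sketch, not part of this interface: carving out the suffix
   of a buffer starting at index `i`, reusing the parent preorder `rel`;
   the length of the result follows from `len_gsub`. The name
   `example_gsub_suffix` is ours. *)
let example_gsub_suffix (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t)
  : Ghost (mbuffer a rrel rel)
      (requires (U32.v i <= length b))
      (ensures (fun b' -> length b' == length b - U32.v i))
  = mgsub rel b i (U32.sub (len b) i)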
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
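(* Illustrative sketch, not part of this interface: the combined footprint
   of two buffers, as typically written in the modifies clauses below.
   The name `example_footprint` is ours. *)
let example_footprint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) : GTot loc
  = loc_union (loc_buffer b1) (loc_buffer b2)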
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
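(* Illustrative sketch, not part of this interface: the footprint of the
   top-most stack frame and all regions extending it, as used in the
   push/pop reasoning further below. The name `example_tip_footprint`
   is ours. *)
let example_tip_footprint (h:HS.mem) : GTot loc
  = loc_all_regions_from false (HS.get_tip h)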
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
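(* Illustrative sketch, not part of this interface: each operand of a
   union is included in the union, a one-line consequence of the lemmas
   above. The name `example_includes_left` is ours. *)
let example_includes_left (s1 s2:loc)
  : Lemma (loc_includes (loc_union s1 s2) s1)
  = loc_includes_refl s1;
    loc_includes_union_l s1 s2 s1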
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
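(* Illustrative sketch, not part of this interface: splitting a buffer at
   `mid` yields two disjoint ranges, a direct instance of the lemma above.
   The name `example_split_disjoint` is ours. *)
let example_split_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (mid:U32.t)
  : Lemma (loc_disjoint (loc_buffer_from_to b 0ul mid)
                        (loc_buffer_from_to b mid (len b)))
  = loc_disjoint_loc_buffer_from_to b 0ul mid mid (len b)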
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
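(* Illustrative sketch, not part of this interface: a pairwise-disjointness
   requirement over three buffers, as one would state it in a precondition.
   The name `example_three_way_disjoint` is ours. *)
let example_three_way_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) : GTot Type0
  = all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]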
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
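(* Illustrative sketch, not part of this interface: specializing the
   elimination lemma above to two buffers, the usual way one argues that a
   step modifying only `b` leaves a disjoint buffer `b'` untouched. The
   name `example_disjoint_preserved` is ours. *)
let example_disjoint_preserved (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (loc_disjoint (loc_buffer b') (loc_buffer b) /\
                     live h b' /\ modifies (loc_buffer b) h h'))
          (ensures (live h' b' /\ as_seq h b' == as_seq h' b'))
  = modifies_buffer_elim b' (loc_buffer b) h h'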
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (and, in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
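(* Illustrative sketch, not part of this interface: widening a modifies
   clause from one buffer to the union of two footprints, the usual way
   composite footprints are handled. The name `example_weaken_footprint`
   is ours. *)
let example_weaken_footprint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (modifies (loc_buffer b1) h h'))
          (ensures (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h h'))
  = loc_includes_refl (loc_buffer b1);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1);
    modifies_loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) h h' (loc_buffer b1)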
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
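(* Illustrative sketch, not part of this interface: chaining two steps that
   each modify `l` into a single `modifies l` fact, using transitivity and
   idempotence of union. The name `example_chain` is ours. *)
let example_chain (l:loc) (h1 h2 h3:HS.mem)
  : Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
          (ensures (modifies l h1 h3))
  = modifies_trans l h1 h2 l h3;
    loc_union_idem l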
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you are having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_not_in : l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | [] | LowStar.Monotonic.Buffer.loc_not_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | l: LowStar.Monotonic.Buffer.loc -> h: FStar.Monotonic.HyperStack.mem -> Type0 | {
"end_col": 34,
"end_line": 1566,
"start_col": 2,
"start_line": 1566
} |
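(* Illustrative sketch for the definition recorded above (`loc_in_unfold` is a
   hypothetical lemma name): `loc_in` and `loc_not_in` are mere shorthands, so
   unfolding them is free.  `l `loc_in` h` says the locations in `l` are
   already allocated in `h`; `loc_not_in l h` is presumably the dual statement
   phrased via `loc_unused_in`. *)
let loc_in_unfold (l:loc) (h:HS.mem)
  : Lemma (loc_in l h <==> loc_not_unused_in h `loc_includes` l)
  = ()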
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) | let compatible_subseq_preorder
(#a: Type0)
(len: nat)
(rel: srel a)
(i: nat)
(j: nat{i <= j /\ j <= len})
(sub_rel: srel a)
= | false | null | false | (forall (s1: Seq.seq a) (s2: Seq.seq a).
{:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))}
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==>
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\
(forall (s: Seq.seq a) (s2: Seq.seq a).
{:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))}
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==>
(rel s (Seq.replace_subseq s i j s2))) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"Prims.nat",
"LowStar.Monotonic.Buffer.srel",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.eq2",
"FStar.Seq.Base.length",
"FStar.Seq.Base.slice",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Properties.replace_subseq",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compatible_subseq_preorder : len: Prims.nat ->
rel: LowStar.Monotonic.Buffer.srel a ->
i: Prims.nat ->
j: Prims.nat{i <= j /\ j <= len} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | [] | LowStar.Monotonic.Buffer.compatible_subseq_preorder | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
len: Prims.nat ->
rel: LowStar.Monotonic.Buffer.srel a ->
i: Prims.nat ->
j: Prims.nat{i <= j /\ j <= len} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | {
"end_col": 50,
"end_line": 45,
"start_col": 4,
"start_line": 40
} |
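(* A hedged sketch, not part of the library: with a trivial preorder that
   relates any two sequences, `compatible_subseq_preorder` holds for every
   sub-range, because both conjuncts conclude with a trivially true relation.
   `trivial_srel` and `trivial_srel_compatible` are names introduced here for
   illustration only. *)
let trivial_srel (a:Type0) : srel a = fun _ _ -> True

let trivial_srel_compatible (a:Type0) (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  : Lemma (compatible_subseq_preorder len (trivial_srel a) i j (trivial_srel a))
  = ()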
|
Prims.Ghost | val get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i | val get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True))
let get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) = | false | null | false | Seq.index (as_seq h p) i | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"FStar.Seq.Base.index",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"Prims.l_True"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val get (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (p: mbuffer a rrel rel) (i: nat)
: Ghost a (requires (i < length p)) (ensures (fun _ -> True)) | [] | LowStar.Monotonic.Buffer.get | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> p: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> i: Prims.nat
-> Prims.Ghost a | {
"end_col": 28,
"end_line": 248,
"start_col": 4,
"start_line": 248
} |
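(* Illustrative only (`first_cell` is a hypothetical name): `get` shines in
   specifications.  For instance, the first cell of a non-empty buffer can be
   read at the proof level without any liveness or effect bookkeeping. *)
let first_cell (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b > 0})
  : GTot a
  = get h b 0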
Prims.GTot | val length (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot nat | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b) | val length (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot nat
let length (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot nat = | false | null | false | U32.v (len b) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.len",
"Prims.nat"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val length (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot nat | [] | LowStar.Monotonic.Buffer.length | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot Prims.nat | {
"end_col": 90,
"end_line": 206,
"start_col": 77,
"start_line": 206
} |
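(* Illustrative sketch (`length_example` is a hypothetical name): `length` is
   just the mathematical view of the machine integer `len`, and a buffer of
   non-zero length cannot be the null buffer, by `length_null_1` from the file
   context above. *)
let length_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (requires 0 < length b)
          (ensures length b == U32.v (len b) /\ g_is_null b == false)
  = length_null_1 b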
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2 | let stable_on (#a: Type0) (p: spred a) (rel: srel a) = | false | null | false | forall (s1: Seq.seq a) (s2: Seq.seq a). {:pattern (p s1); (rel s1 s2); (p s2)}
(p s1 /\ rel s1 s2) ==> p s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.spred",
"LowStar.Monotonic.Buffer.srel",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.l_and",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
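(* Client-side sketch, not a library definition (`gprefix` is a hypothetical
   name): carving a ghost prefix of a buffer at its existing preorder.  `mgsub`
   itself only needs the bounds to hold; the `compatible_sub` condition shows
   up in lemmas such as `live_gsub` below. *)
let gprefix (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
  : GTot (mbuffer a rrel rel)
  = mgsub rel b 0ul i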
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
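(* Illustrative only (`loc_none_unit` is a hypothetical name): `loc_none` is a
   two-sided unit for `loc_union`, and the SMT patterns attached above make
   such equalities go through automatically. *)
let loc_none_unit (l:loc)
  : Lemma (loc_union loc_none (loc_union l loc_none) == l)
  = ()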
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
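(* Illustrative sketch (hypothetical client code, not part of this interface):
   cutting a buffer at an index yields two sub-buffers with disjoint
   footprints, which is the usual way to work on two halves independently.
   The lemma above is expected to discharge this through its SMT patterns.

   let split_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
     : Lemma (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                           (loc_buffer (mgsub rel b i (len b `U32.sub` i))))
     = ()
*)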
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
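(* Illustrative note (hypothetical client code, not part of this interface):
   these list combinators normalize at typechecking time. For instance,

     all_live h [buf b1; buf b2]

   reduces to (roughly) `live h b1 /\ live h b2`, and

     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   reduces to the three pairwise `loc_disjoint` conjuncts for
   b1/b2, b1/b3 and b2/b3. *)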
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
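(* Illustrative sketch (hypothetical function, not part of this interface):
   the shape of a typical Low* stateful signature built on `modifies`. The
   extra conjunct on `rel` in the precondition accounts for the buffer's
   preorder, in the same way as `upd` below.

   val memset_zero (#rrel #rel:srel UInt8.t) (b:mbuffer UInt8.t rrel rel)
     : HST.Stack unit
       (requires (fun h -> live h b /\ rel (as_seq h b) (Seq.create (length b) 0uy)))
       (ensures  (fun h0 _ h1 ->
         modifies (loc_buffer b) h0 h1 /\  (* only `b` is touched *)
         live h1 b /\
         as_seq h1 b == Seq.create (length b) 0uy))
*)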
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
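(* Illustrative sketch (hypothetical client code, not part of this interface):
   the standard framing step this lemma enables, expected to be discharged
   automatically via the SMT patterns above.

   let frame_example (#a:Type0) (#rrel1 #rel1 #rrel2 #rel2:srel a)
     (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2) (h0 h1:HS.mem)
     : Lemma
       (requires (live h0 b2 /\
                  loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                  modifies (loc_buffer b1) h0 h1))
       (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = ()
*)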
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then the modifies clause holds
/// for any set of memory locations (and, in particular, for the empty
/// set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
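(* Illustrative sketch (hypothetical client code, not part of this interface):
   chaining two steps over the same footprint without growing the loc,
   relying on `loc_includes_refl` for the `l `loc_includes` l` side condition.

   let modifies_chain (l:loc) (h1 h2 h3:HS.mem)
     : Lemma
       (requires (modifies l h1 h2 /\ modifies l h2 h3))
       (ensures  (modifies l h1 h3))
     = modifies_trans_linear l l h1 h2 h3
*)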
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
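(* Illustrative sketch of the client shape this lemma supports (hypothetical
   code, not part of this interface; `alloca` comes from LowStar.Buffer and
   push_frame/pop_frame from FStar.HyperStack.ST):

   let f (b:buffer t) : HST.Stack unit
     (requires (fun h -> live h b))
     (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1))
   = HST.push_frame ();            (* a fresh stack frame is pushed *)
     let tmp = alloca 0uy 64ul in  (* scratch space in the new frame *)
     ... compute into tmp, then write the result into b ...
     HST.pop_frame ()              (* the frame and `tmp` are popped away *)
*)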
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
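(* Illustrative sketch (hypothetical client code, not part of this interface):
   a freshly allocated location is disjoint from anything already allocated,
   using `unused_in_not_unused_in_disjoint_2` above.

   let fresh_disjoint (l_new l_old:loc) (h0 h1:HS.mem)
     : Lemma
       (requires (fresh_loc l_new h0 h1 /\ l_old `loc_in` h0))
       (ensures  (loc_disjoint l_new l_old))
     = loc_includes_refl l_new;
       loc_includes_refl l_old;
       unused_in_not_unused_in_disjoint_2 l_new l_old l_new l_old h0
*)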
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
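(* Illustrative sketch (hypothetical client code, not part of this interface;
   `trivial_preorder` is provided by LowStar.Buffer): a read-modify-write on
   the first cell of a buffer whose preorder accepts arbitrary updates.

   let bump (b:mbuffer UInt32.t (trivial_preorder UInt32.t) (trivial_preorder UInt32.t))
     : HST.Stack unit
       (requires (fun h -> live h b /\ 0 < length b /\ U32.v (get h b 0) < 0xffffffff))
       (ensures  (fun h0 _ h1 ->
         modifies (loc_buffer b) h0 h1 /\
         live h1 b /\
         U32.v (get h1 b 0) == U32.v (get h0 b 0) + 1))
   = let x = index b 0ul in
     upd b 0ul (x `U32.add` 1ul)
*)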
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stable_on : p: LowStar.Monotonic.Buffer.spred a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | [] | LowStar.Monotonic.Buffer.stable_on | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | p: LowStar.Monotonic.Buffer.spred a -> rel: LowStar.Monotonic.Buffer.srel a -> Prims.logical | {
"end_col": 94,
"end_line": 1975,
"start_col": 2,
"start_line": 1975
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s | let alloc_post_mem_common
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h0 h1: HS.mem)
(s: Seq.seq a)
= | false | null | false | live h1 b /\ unused_in b h0 /\
(Map.domain (HS.get_hmap h1)) `Set.equal` (Map.domain (HS.get_hmap h0)) /\
(HS.get_tip h1) == (HS.get_tip h0) /\ modifies loc_none h0 h1 /\ as_seq h1 b == s | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.unused_in",
"FStar.Set.equal",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Map.domain",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"Prims.eq2",
"FStar.Monotonic.HyperStack.get_tip",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_none",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
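(* For intuition, a hypothetical sketch (not part of this interface): with a
   trivial preorder that relates any two sequences, compatibility holds for
   any slice, because both implications above conclude with `True`:

     let trivial (a:Type0) : srel a = fun _ _ -> True
     (* compatible_subseq_preorder len (trivial a) i j (trivial a)
        reduces to (... ==> True) /\ (... ==> True) *)

   Conversely, if `rel` only allows cell-wise increasing updates while
   `sub_rel` allows arbitrary updates on the slice, the second implication
   fails, so the two preorders are not compatible. *)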
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
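(* Illustrative note (hypothetical sketch, not part of this interface): the
   common instantiations of `mbuffer` live in other modules. For example,
   LowStar.Buffer defines plain mutable buffers roughly as

     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)

   where `trivial_preorder` relates any two sequences, so arbitrary updates
   are allowed; immutable buffers instead pick a `rel` that forces the
   contents to stay equal to their initial value. *)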
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
the `FStar.Classical.forall_intro_<n>` lemmas and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
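(* Illustrative sketch, not part of this interface: specifications typically
   relate a buffer to its logical contents via `as_seq` and `get`. For
   instance, a hypothetical post-condition stating that the first cell of a
   buffer `b` holds `0uy` could be written as

     fun h0 _ h1 -> live h1 b /\ length b > 0 /\ get h1 b 0 == 0uy

   where `b : mbuffer UInt8.t rrel rel` is assumed to be in scope. *)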
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with
/// the preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible (in the sense of ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
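(* Illustrative sketch, not part of this interface: assuming a buffer
   `b : mbuffer a rrel rel` with `length b >= 8`, the ghost sub-buffer
   covering cells [2, 6) at the same preorder is

     let b' = mgsub rel b 2ul 4ul in ...

   `len_gsub` below gives `length b' == 4`, and lemmas relating `b'` to `b`
   (e.g. `live_gsub`) additionally assume `compatible_sub b 2ul 4ul sub_rel`
   for the chosen sub-preorder. *)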
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same root preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
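(* Illustrative sketch, not part of this interface: the footprint of an
   operation touching two (hypothetical) buffers `b1` and `b2` is typically
   written as

     loc_union (loc_buffer b1) (loc_buffer b2)

   The monoid laws above (associativity, commutativity and idempotence of
   `loc_union`, with `loc_none` as unit) let such unions be rearranged freely
   in proofs. *)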
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
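(* Illustrative sketch, not part of this interface: a stateful function over
   three (hypothetical) buffers `b1`, `b2`, `b3` might state its precondition as

     all_live h [buf b1; buf b2; buf b3] /\
     loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   Both conjunctions are unfolded by BigOps at typechecking time. *)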
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
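(* Illustrative sketch, not part of this interface: a function that writes
   only to a (hypothetical) buffer `b` is typically given the post-condition

     fun h0 _ h1 -> modifies (loc_buffer b) h0 h1

   so that any location disjoint from `loc_buffer b` (another buffer, a
   reference, a whole region) is preserved between `h0` and `h1`, via the
   elimination lemmas below. *)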
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
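(* Illustrative sketch, not part of this interface: with hypothetical buffers
   `b1`, `b2` such that `loc_disjoint (loc_buffer b1) (loc_buffer b2)`,
   `live h b2` and `modifies (loc_buffer b1) h h'`, the lemma above (usually
   fired automatically through its SMT patterns) yields

     live h' b2 /\ as_seq h b2 == as_seq h' b2
*)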
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies`` holds for any set
/// of memory locations (and, in particular, for the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
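(* Illustrative sketch, not part of this interface: after two successive
   stateful calls with hypothetical footprints, from
   `modifies (loc_buffer b1) h0 h1` and `modifies (loc_buffer b2) h1 h2`,
   `modifies_trans` gives

     modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h2

   `modifies_trans_linear` handles the common case where the goal footprint
   already includes the footprint of the first step. *)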
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
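(* Illustrative sketch, not part of this interface: a typical stack-allocating
   client has the shape

     HST.push_frame ();
     ... allocate and use frame-local buffers, modify a caller buffer b ...
     HST.pop_frame ()

   Together with `fresh_frame_modifies` and `popped_modifies` further below,
   this lemma lets the caller conclude a `modifies` clause mentioning only
   `loc_buffer b`, with all frame-local locations dropped. The name `b` is
   hypothetical. *)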
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
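(* Illustrative sketch, not part of this interface: an allocation function
   returning a new buffer `b` typically ensures

     fun h0 b h1 -> live h1 b /\ fresh_loc (loc_buffer b) h0 h1 /\ modifies loc_none h0 h1

   which, with `unused_in_not_unused_in_disjoint_2` above, makes `b` provably
   disjoint from every location already allocated in `h0`. *)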
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we have a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
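(* Illustrative sketch, not part of this interface: a (hypothetical) counter
   `c : mpointer U32.t rrel rel` is read in specifications as `deref h c`,
   which unfolds to `get h c 0`. *)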
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
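(* Illustrative sketch, not part of this interface: assuming `b` is live and
   has length at least 1, a read is written

     let x = index b 0ul in ...

   and the post-condition above equates `x` with `Seq.index (as_seq h b) 0`. *)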
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
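(* Illustrative sketch, not part of this interface: a write through ``upd``.
   The preorder obligation is passed on to the caller; with a trivial
   preorder it holds automatically. The helper name is hypothetical. *)
let write_head (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
  :HST.Stack unit
    (requires (fun h -> live h b /\ length b > 0 /\
                      rel (as_seq h b) (Seq.upd (as_seq h b) 0 v)))
    (ensures (fun h _ h' -> modifies (loc_buffer b) h h' /\ live h' b /\
                          as_seq h' b == Seq.upd (as_seq h b) 0 v))
  = upd b 0ul v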
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
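(* Illustrative sketch, not part of this interface: using ``recall`` to
   re-establish liveness of a recallable buffer before reading from it.
   The helper name is hypothetical. *)
let recall_and_read (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :HST.Stack a (requires (fun _ -> recallable b /\ length b > 0))
               (ensures (fun h y h' -> h == h' /\ live h' b /\ y == get h' b 0))
  = recall b;
    index b 0ul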
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier, you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
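(* Illustrative sketch, not part of this interface: the intended usage
   pattern, witnessing a predicate that is stable w.r.t. the buffer
   preorder and recalling it later. The helper name is hypothetical. *)
let witness_then_recall (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:spred a)
  :HST.ST unit (requires (fun h0 -> recallable b /\ p (as_seq h0 b) /\ p `stable_on` rel))
               (ensures (fun h0 _ h1 -> h0 == h1 /\ live h1 b /\ p (as_seq h1 b)))
  = witness_p b p;
    recall_p b p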
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``'d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
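(* Illustrative sketch, not part of this interface: the typical deallocation
   pattern for a heap-allocated, freeable buffer. The helper name is
   hypothetical. *)
let dispose (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
               (ensures (fun h0 _ h1 -> modifies (loc_addr_of_buffer b) h0 h1 /\
                                      HS.live_region h1 (frameOf b)))
  = free b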
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloc_post_mem_common : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | [] | LowStar.Monotonic.Buffer.alloc_post_mem_common | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | {
"end_col": 20,
"end_line": 2098,
"start_col": 4,
"start_line": 2093
} |
|
FStar.Pervasives.Lemma | val loc_union_idem_1 (s1 s2: loc)
: Lemma (loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2 | val loc_union_idem_1 (s1 s2: loc)
: Lemma (loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
let loc_union_idem_1 (s1 s2: loc)
: Lemma (loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))] = | false | null | true | loc_union_assoc s1 s1 s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
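(* Illustrative sketch, not part of this interface: the monoid laws above are
   enough to reassociate and commute unions inside proofs. The lemma name is
   hypothetical. *)
let loc_union_reassoc_example (s1 s2 s3: loc)
  :Lemma (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s2 s1) s3)
  = loc_union_comm s1 s2;
    loc_union_assoc s1 s2 s3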
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union_idem_1 (s1 s2: loc)
: Lemma (loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))] | [] | LowStar.Monotonic.Buffer.loc_union_idem_1 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union s1 (LowStar.Monotonic.Buffer.loc_union s1 s2) ==
LowStar.Monotonic.Buffer.loc_union s1 s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_union s1 (LowStar.Monotonic.Buffer.loc_union s1 s2))] | {
"end_col": 26,
"end_line": 444,
"start_col": 2,
"start_line": 444
} |
Prims.GTot | val loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) | val loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc
let loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc = | false | null | false | loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b)) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.loc_addresses",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_freed_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_freed_mreference | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: FStar.Monotonic.HyperStack.mreference a p -> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 67,
"end_line": 565,
"start_col": 2,
"start_line": 565
} |
FStar.Pervasives.Lemma | val loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1 | val loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)]
let loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] = | false | null | true | Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_disjoint_sym",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
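(*
 * A small illustrative sketch (the `example_*` names below are ours): with the
 * preorder that relates any two sequences, compatibility should hold for every
 * slice, since both conjuncts reduce to trivially true implications.
 *)
unfold let example_trivial_srel (a:Type0) : srel a = fun _ _ -> True

let example_trivial_compatible (a:Type0)
  (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  : Lemma (compatible_subseq_preorder len (example_trivial_srel a) i j (example_trivial_srel a))
  = ()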
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
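(*
 * Usage sketch (the name `example_length_pos_not_null` is ours): the SMT
 * pattern on `length_null_1` is expected to make this fact available without
 * an explicit lemma call.
 *)
let example_length_pos_not_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
  = ()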
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
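(*
 * Usage sketch (the name `example_first_element` is ours): `get` is typically
 * used like this in specifications, with the index bound discharged from the
 * buffer's length.
 *)
let example_first_element (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b > 0})
  : GTot a
  = get h b 0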
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
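(*
 * Usage sketch (the name `example_full_range_gsub` is ours): `gsub_zero_length`
 * carries no SMT pattern, so it is invoked explicitly to recover a buffer from
 * its full-range sub-buffer.
 *)
let example_full_range_gsub (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (mgsub rel b 0ul (len b) == b)
  = gsub_zero_length b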
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
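(*
 * Illustrative sketch (the name `example_loc_union_normalize` is ours): the
 * algebraic laws above, through their SMT patterns, should let such unions be
 * normalized with no explicit lemma calls.
 *)
let example_loc_union_normalize (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 l1) (loc_union l2 loc_none) == loc_union l1 l2)
  = ()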
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
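(*
 * Usage sketch (the name `example_includes_union_elim` is ours): the pattern
 * on `loc_includes_union_r'` turns inclusion of a union into inclusion of
 * each operand.
 *)
let example_includes_union_elim (s s1 s2: loc)
  : Lemma (requires (loc_includes s (loc_union s1 s2)))
          (ensures  (loc_includes s s1 /\ loc_includes s s2))
  = ()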
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
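(*
 * Combined usage sketch (the name `example_includes_region_union_buffer` is
 * ours): a union containing a buffer's region includes the buffer itself,
 * via `loc_includes_region_buffer'` and the union-left patterns above.
 *)
let example_includes_region_union_buffer
  (l:loc) (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_union (loc_regions true (Set.singleton (frameOf b))) l)
                        (loc_buffer b))
  = ()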
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_sym' (s1 s2: loc)
: Lemma (loc_disjoint s1 s2 <==> loc_disjoint s2 s1) [SMTPat (loc_disjoint s1 s2)] | [] | LowStar.Monotonic.Buffer.loc_disjoint_sym' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_disjoint s1 s2 <==> LowStar.Monotonic.Buffer.loc_disjoint s2 s1
) [SMTPat (LowStar.Monotonic.Buffer.loc_disjoint s1 s2)] | {
"end_col": 50,
"end_line": 897,
"start_col": 2,
"start_line": 896
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel | let compatible_sub
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: U32.t)
(len: U32.t{U32.v i + U32.v len <= length b})
(sub_rel: srel a)
= | false | null | false | compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.compatible_subseq_preorder",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compatible_sub : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t{FStar.UInt32.v i + FStar.UInt32.v len <= LowStar.Monotonic.Buffer.length b} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | [] | LowStar.Monotonic.Buffer.compatible_sub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t{FStar.UInt32.v i + FStar.UInt32.v len <= LowStar.Monotonic.Buffer.length b} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | {
"end_col": 85,
"end_line": 273,
"start_col": 4,
"start_line": 273
} |
|
FStar.Pervasives.Lemma | val loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r) | val loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
let loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] = | false | null | true | loc_includes_union_l s1 s2 (loc_regions prf r) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"Prims.bool",
"FStar.Set.set",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_regions",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
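(* Illustrative sketch, not part of the original interface: since [len] and
   [length] are ghost, executable code must carry lengths as explicit
   machine-integer arguments and tie them to the ghost length by refinement,
   e.g. for a hypothetical operation:

     val fill (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:U32.t{U32.v l == length b}) (x:a)
       : HST.Stack unit
         (requires fun h -> live h b)
         (ensures  fun _ _ _ -> True)

   [fill] is a made-up name used only for illustration. *)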
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
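(* Illustrative sketch, not part of the original interface: [get] is handy for
   element-wise specifications.  A hypothetical ghost predicate stating that a
   buffer is sorted with respect to a boolean order [leq] could be:

     let sorted (#a:Type0) (#rrel #rel:srel a)
       (h:HS.mem) (b:mbuffer a rrel rel) (leq:a -> a -> Tot bool)
       : GTot Type0
       = forall (i:nat{i < length b}) (j:nat{j < length b}).
           i <= j ==> leq (get h b i) (get h b j)

   [sorted] and [leq] are made-up names. *)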
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a preorder different from that of their parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible (see ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
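(* Illustrative sketch, not part of the original interface: combining
   [gsub_gsub] and [as_seq_gsub] reduces reasoning about a nested sub-buffer
   to a slice of the enclosing buffer.  In ghost code, for suitable indices:

     let b'  = mgsub rel b i1 len1 in
     let b'' = mgsub rel b' i2 len2 in
     (* gsub_gsub   : b'' == mgsub rel b (i1 `U32.add` i2) len2            *)
     (* as_seq_gsub : as_seq h b'' ==
                      Seq.slice (as_seq h b)
                                (U32.v i1 + U32.v i2)
                                (U32.v i1 + U32.v i2 + U32.v len2)         *)

   assuming U32.v i1 + U32.v len1 <= length b and U32.v i2 + U32.v len2 <= U32.v len1. *)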
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
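(* Illustrative sketch, not part of the original interface: clients usually
   build one footprint out of several locations and let the lemmas above
   normalize it.  For hypothetical buffers [b1] and [b2] (with [loc_buffer]
   declared just below):

     let footprint = loc_union (loc_buffer b1)
                               (loc_union (loc_buffer b2) loc_none) in
     (* loc_union_loc_none_r, loc_union_assoc and loc_union_comm let the SMT
        solver treat this as loc_union (loc_buffer b1) (loc_buffer b2) *)
     ...
*)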
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
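(* Illustrative sketch, not part of the original interface: region locations
   are typically used to frame away a whole stack frame.  A function that
   pushes a frame might state, for the memory [h1] right after
   [HST.push_frame ()] and the final memory [h2]:

     modifies (loc_all_regions_from false (HS.get_tip h1)) h1 h2

   where [modifies] is declared later in this interface.  [loc_region_only]
   would instead cover only the frame itself, not the regions allocated
   under it. *)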
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
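(* Illustrative sketch, not part of the original interface: inclusion is what
   lets a caller frame a callee's footprint.  For hypothetical buffers [b] and
   [b'], the fact

     loc_includes (loc_union (loc_buffer b) (loc_buffer b')) (loc_buffer b)

   follows from [loc_includes_union_l] and [loc_includes_refl]; the SMT
   patterns attached to the lemmas above usually discharge it automatically. *)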
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding sets of memory locations are related by inclusion as well
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_union_l_regions (s1 s2: loc) (prf: bool) (r: Set.set HS.rid)
: Lemma (requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))] | [] | LowStar.Monotonic.Buffer.loc_includes_union_l_regions | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
prf: Prims.bool ->
r: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_regions prf r) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_regions prf r))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_regions prf r))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_regions prf r))
] | {
"end_col": 48,
"end_line": 875,
"start_col": 2,
"start_line": 875
} |
FStar.Pervasives.Lemma | val loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2' | val loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
let loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] = | false | null | true | loc_disjoint_includes b1 b2 b1 b2' | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_disjoint",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
 * We expect that clients will rarely work with this directly.
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
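(* Illustrative sketch, not part of the original interface: a sub-buffer lives
   in the same region and at the same address as its parent (see
   [frameOf_gsub] and [as_addr_gsub] below), i.e. for suitable [i], [l] and
   [sub_rel]:

     frameOf (mgsub sub_rel b i l) == frameOf b /\
     as_addr (mgsub sub_rel b i l) == as_addr b

   so address-based disjointness can never separate a buffer from its own
   sub-buffers; the finer-grained [loc_buffer_from_to] locations below serve
   that purpose. *)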
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a preorder different from that of their parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible (see ``compatible_sub`` above)
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
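(* Illustrative sketch, not part of the original interface: the two shorthands
   above differ only in the [preserve_liveness] flag.  For a hypothetical
   reference [r : HS.mreference a p]:

     loc_mreference r        (* liveness-preserving: r stays live            *)
     loc_freed_mreference r  (* also accounts for deallocating r's address   *)

   so a modifies clause for an operation that may free [r] should use
   [loc_freed_mreference r]. *)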
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the corresponding sets of
/// memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the set corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the set corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
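(* A minimal usage sketch (hypothetical lemma, for illustration only): the union-on-the-left
   lemmas above are what let a callee whose footprint is `loc_buffer b` be framed by a
   caller whose own clause mentions the larger `loc_union (loc_buffer b) extra`:

   let widen_footprint (extra:loc) (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : Lemma (loc_includes (loc_union (loc_buffer b) extra) (loc_buffer b))
     = ()
*)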
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
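(* Sketch (for intuition; not a stated lemma of this interface): with the equivalence
   above in scope, a disjointness obligation against a union splits into the two obvious
   sub-obligations:

   let split_disjointness (s l1 l2:loc)
     : Lemma (requires (loc_disjoint s l1 /\ loc_disjoint s l2))
             (ensures  (loc_disjoint s (loc_union l1 l2)))
     = ()
*)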
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2')) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_includes_r (b1 b2 b2': loc)
: Lemma (requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')] | [] | LowStar.Monotonic.Buffer.loc_disjoint_includes_r | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.loc ->
b2: LowStar.Monotonic.Buffer.loc ->
b2': LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes b2 b2' /\ LowStar.Monotonic.Buffer.loc_disjoint b1 b2)
(ensures LowStar.Monotonic.Buffer.loc_disjoint b1 b2')
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint b1 b2');
SMTPat (LowStar.Monotonic.Buffer.loc_includes b2 b2')
] | {
"end_col": 36,
"end_line": 936,
"start_col": 2,
"start_line": 936
} |
FStar.Pervasives.Lemma | val modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3 | val modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
let modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] = | false | null | true | modifies_trans l h1 h2 l_goal h3 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.modifies_trans",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
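(* A concrete instance sketch (the names below are hypothetical; LowStar.Buffer provides
   the official wrappers): ordinary mutable buffers arise by instantiating both preorders
   with the trivial relation that relates any two sequences:

   let trivial_rel (a:Type0) : srel a = fun _ _ -> True
   type plain_buffer (a:Type0) = mbuffer a (trivial_rel a) (trivial_rel a)
*)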
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
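(* How `live` typically shows up in client signatures (a sketch; `read_first` is a
   hypothetical function, not part of this interface):

   val read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack a
       (requires (fun h -> live h b /\ length b > 0))
       (ensures  (fun h0 _ h1 -> h0 == h1))
*)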
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
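(* Functional-correctness specifications are phrased against `as_seq` (a sketch only;
   `fill_zero` is hypothetical, and the `rel` hypothesis is there because writes must
   respect the buffer's preorder):

   val fill_zero (#rrel #rel:srel FStar.UInt8.t) (b:mbuffer FStar.UInt8.t rrel rel)
     : HST.Stack unit
       (requires (fun h -> live h b /\ rel (as_seq h b) (Seq.create (length b) 0uy)))
       (ensures  (fun h0 _ h1 -> live h1 b /\ as_seq h1 b == Seq.create (length b) 0uy))
*)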
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility".
///
/// Sub-buffers can be taken at a different preorder than their parent buffer,
/// but we need to ensure that changes to the sub-buffer are compatible with the
/// preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
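(* Concrete instance (sketch): for a buffer `b` with `length b >= 5` and a single
   preorder `rel`, taking a sub-buffer of a sub-buffer collapses as

     mgsub rel (mgsub rel b 1ul 4ul) 1ul 2ul == mgsub rel b 2ul 2ul

   since the offsets add up (1 + 1 = 2) while the innermost length (2) is kept.
*)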
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
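(* For intuition (a sketch; not a stated lemma of this interface): together, these laws
   let footprints be normalized, e.g.

     loc_union l1 (loc_union loc_none (loc_union l2 l1)) == loc_union l1 l2

   by the left unit law, then associativity, commutativity and idempotence.
*)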
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding
/// to ``s1`` includes the set corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the set of memory locations corresponding to ``s1`` includes
/// the set corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
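(* Expansion sketch (b1, b2, b3 are hypothetical buffers): since these combinators are
   `unfold`, a precondition such as

     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   reduces at typechecking time to the three pairwise facts
   loc_disjoint (loc_buffer b1) (loc_buffer b2),
   loc_disjoint (loc_buffer b1) (loc_buffer b3) and
   loc_disjoint (loc_buffer b2) (loc_buffer b3).
*)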
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
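(* The canonical framing step enabled by the lemma above (a sketch; `b1` and `b2` are
   hypothetical, and the proof is found through the SMT patterns):

   let frame_unrelated_buffer (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = ()
*)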
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is (vacuously) modified, in particular the empty set ``loc_none``.
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
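(* Typical use (sketch; hypothetical lemma): a callee that establishes
   `modifies (loc_buffer b) h0 h1` can be quoted against a caller-side clause over any
   larger footprint:

   let weaken_modifies (l:loc) (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h0 h1))
             (ensures  (modifies (loc_union l (loc_buffer b)) h0 h1))
     = ()
*)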
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
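(* Illustrative sketch (not part of the original interface; the names l1, l2,
   h0, h1, h2 are arbitrary): chaining two modifies clauses with
   `modifies_trans` yields a single clause over the union of the footprints. *)
let modifies_trans_example (l1 l2:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
          (ensures (modifies (loc_union l1 l2) h0 h2))
  = modifies_trans l1 h0 h1 l2 h2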
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_trans_linear (l l_goal: loc) (h1 h2 h3: HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)] | [] | LowStar.Monotonic.Buffer.modifies_trans_linear | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l: LowStar.Monotonic.Buffer.loc ->
l_goal: LowStar.Monotonic.Buffer.loc ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem ->
h3: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h1 h2 /\ LowStar.Monotonic.Buffer.modifies l_goal h2 h3 /\
LowStar.Monotonic.Buffer.loc_includes l_goal l)
(ensures LowStar.Monotonic.Buffer.modifies l_goal h1 h3)
[
SMTPat (LowStar.Monotonic.Buffer.modifies l h1 h2);
SMTPat (LowStar.Monotonic.Buffer.modifies l_goal h1 h3)
] | {
"end_col": 36,
"end_line": 1307,
"start_col": 4,
"start_line": 1307
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x | val modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
]
let modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] = | false | null | true | modifies_liveness_insensitive_mreference loc_none l h h' x | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_mreference",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.address_liveness_insensitive_locs",
"FStar.Monotonic.HyperStack.contains",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
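(* Illustrative sketch (not part of this interface): the most common
   instantiation uses the trivial preorder, which places no constraint on how
   the contents may evolve; this mirrors the `buffer` wrapper provided by
   LowStar.Buffer. *)
let trivial_preorder_example (a:Type0) : srel a = fun _ _ -> True
let buffer_example (a:Type0) = mbuffer a (trivial_preorder_example a) (trivial_preorder_example a)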
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
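(* Illustrative sketch: combining the lemmas above, the null buffer of any
   type is live in every memory and has length 0. *)
let null_buffer_props_example (a:Type0) (rrel rel:srel a) (h:HS.mem)
  : Lemma (live h (mnull #a #rrel #rel) /\ length (mnull #a #rrel #rel) == 0)
  = live_null a rrel rel h;
    len_null a rrel rel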
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
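(* Illustrative sketch (hypothetical helper, not in the interface): reading
   the first cell of a non-empty buffer at the proof level via `get`. *)
let first_cell_example (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
  : Ghost a (requires (length b > 0)) (ensures (fun _ -> True))
  = get h b 0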
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
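(* Illustrative sketch: rewriting a buffer as its own full-length sub-buffer,
   which is occasionally needed to align goals stated via `mgsub`. *)
let gsub_whole_buffer_example (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (b == mgsub rel b 0ul (len b))
  = gsub_zero_length b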
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
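(* Illustrative sketch: the monoid laws above let footprints be freely
   reassociated and commuted, e.g. the following restatement. *)
let loc_union_reassoc_example (a b c:loc)
  : Lemma (loc_union a (loc_union b c) == loc_union (loc_union b a) c)
  = loc_union_assoc a b c;
    loc_union_comm a b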
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
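(* Illustrative sketch: a union always includes each of its components. *)
let loc_includes_union_component_example (s1 s2:loc)
  : Lemma (loc_includes (loc_union s1 s2) s1)
  = loc_includes_refl s1;
    loc_includes_union_l s1 s2 s1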
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
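(* Illustrative sketch: combining symmetry with `loc_disjoint_none_r`, the
   empty footprint is also disjoint from anything when it appears on the left. *)
let loc_disjoint_none_l_example (s:loc)
  : Lemma (loc_disjoint loc_none s)
  = loc_disjoint_none_r s;
    loc_disjoint_sym s loc_none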
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
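(* Illustrative sketch: a typical precondition stating that three footprints
   are pairwise disjoint; it unfolds, roughly, to the three `loc_disjoint`
   conjuncts. *)
let all_disjoint_example (l1 l2 l3:loc) : Type0 =
  all_disjoint [l1; l2; l3]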
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
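(* Illustrative sketch: the elimination principle in action, stated as a
   stand-alone framing lemma: a buffer disjoint from the modified footprint
   keeps its liveness and its contents. *)
let frame_buffer_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
  : Lemma (requires (live h b /\ modifies p h h' /\ loc_disjoint (loc_buffer b) p))
          (ensures (live h' b /\ as_seq h b == as_seq h' b))
  = modifies_buffer_elim b p h h'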
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (and, in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
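(* Illustrative sketch: weakening a modifies clause from a buffer's footprint
   to its union with an arbitrary extra footprint. *)
let modifies_weaken_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h h':HS.mem)
  : Lemma (requires (modifies (loc_buffer b) h h'))
          (ensures (modifies (loc_union l (loc_buffer b)) h h'))
  = loc_includes_refl (loc_buffer b);
    loc_includes_union_l l (loc_buffer b) (loc_buffer b);
    modifies_loc_includes (loc_union l (loc_buffer b)) h h' (loc_buffer b)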
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');]; | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_mreference_weak
(l: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ h `HS.contains` x)
)
(ensures (h' `HS.contains` x))
[
SMTPatOr
[
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h')];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h')]
]
] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_mreference_weak | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperStack.mreference t pre
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.address_liveness_insensitive_locs
l /\ FStar.Monotonic.HyperStack.contains h x)
(ensures FStar.Monotonic.HyperStack.contains h' x)
[
SMTPatOr [
[
SMTPat (FStar.Monotonic.HyperStack.contains h x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (FStar.Monotonic.HyperStack.contains h' x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
]
]
] | {
"end_col": 62,
"end_line": 1194,
"start_col": 4,
"start_line": 1194
} |
FStar.Pervasives.Lemma | val loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a) | val loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
let loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] = | false | null | true | loc_includes_union_l s1 s2 (loc_addresses prf r a) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Set.set",
"Prims.nat",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_addresses",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
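(* Illustrative sketch, hypothetical client code (not part of this
   interface): a client that does not need to constrain how the contents
   evolve can instantiate both preorders with the trivial relation, which
   is essentially how a plain mutable buffer type is obtained in practice:

     let trivial_preorder (a:Type0) : srel a = fun _ _ -> True
     let buffer (a:Type0) = mbuffer a (trivial_preorder a) (trivial_preorder a)

   Since the trivial preorder relates any two sequences, such a buffer may
   be overwritten with arbitrary contents. *)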
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
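(* Illustrative sketch, hypothetical signature (not part of this
   interface): in client code, ``live`` typically shows up as a
   precondition of stateful operations, e.g. a read-only traversal could
   be specified as

     val probe (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
       : HST.Stack unit
           (requires fun h -> live h b)
           (ensures  fun h0 _ h1 -> h0 == h1)

   The predicate is ghost, so it costs nothing at run time; it only
   restricts when the operation may be called. *)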
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
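(* Illustrative sketch (not part of this interface): functional
   correctness is stated in terms of ``as_seq`` and ``get``; for a buffer
   ``b`` live in ``h`` and an index ``i < length b`` one has, by
   definition of ``get``,

     get h b i == Seq.index (as_seq h b) i

   so facts proved about the whole sequence ``as_seq h b`` transfer
   directly to individual elements. *)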
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent buffer's preorder
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
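(* Illustrative sketch (not part of this interface): assuming a buffer
   ``b`` with ``length b >= 7``, the ghost sub-buffer of the five elements
   starting at offset 2, carved out at the same preorder, is written

     let view = mgsub rel b 2ul 5ul

   and the lemmas below (``len_gsub``, ``as_seq_gsub``, ...) relate
   ``view`` back to ``b``. *)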
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers that have the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
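(* Illustrative sketch (not part of this interface): these lemmas let the
   SMT solver normalize footprints built from the monoid operations; for
   any location ``l``,

     loc_union (loc_union l loc_none) l == l

   follows from ``loc_union_loc_none_r`` together with ``loc_union_idem``. *)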
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
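(* Illustrative sketch (not part of this interface): a function that
   writes to two buffers ``b1`` and ``b2`` would advertise the footprint

     loc_union (loc_buffer b1) (loc_buffer b2)

   in its ``modifies`` clause (introduced further below), so that any
   location disjoint from both buffers is known to be preserved. *)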
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
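(* Illustrative sketch (not part of this interface): ``loc_buffer b``
   covers the cells of ``b``, whereas ``loc_addr_of_buffer b`` covers the
   whole allocation unit at ``b``'s address without preserving its
   liveness, which is the footprint shape one would use for an operation
   that frees ``b``. For two assumed addresses ``a1`` and ``a2`` in a
   region ``r``, an address-based footprint can be written

     loc_addresses true r (Set.union (Set.singleton a1) (Set.singleton a2))

   where the ``true`` flag records that liveness of those addresses is
   preserved. *)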
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
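(* Illustrative sketch (not part of this interface): for an assumed region
   ``r : HS.rid``, ``loc_region_only false r`` covers only ``r`` itself,
   while

     loc_all_regions_from false r

   additionally covers every region allocated beneath ``r``, which is the
   shape often used when a computation may also touch nested stack
   frames. *)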
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
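(* Illustrative sketch (not part of this interface): together with the
   union lemmas above, these properties discharge the usual framing
   obligations; for buffers ``b1`` and ``b2``,

     loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b1)

   holds by ``loc_includes_refl`` on ``loc_buffer b1`` followed by
   ``loc_includes_union_l``. *)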
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their
/// corresponding sets of memory locations are related by ``loc_includes`` as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in that region.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory
/// locations are related by ``loc_includes`` as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by ``loc_includes`` as well.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_union_l_addresses (s1 s2: loc) (prf: bool) (r: HS.rid) (a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a))
)
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))] | [] | LowStar.Monotonic.Buffer.loc_includes_union_l_addresses | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
prf: Prims.bool ->
r: FStar.Monotonic.HyperHeap.rid ->
a: FStar.Set.set Prims.nat
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_addresses prf r a) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_addresses prf r a))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_addresses prf r a))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_addresses prf r a))
] | {
"end_col": 52,
"end_line": 865,
"start_col": 2,
"start_line": 865
} |
Prims.GTot | val loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b)) | val loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc
let loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc = | false | null | false | loc_addresses false (frameOf b) (Set.singleton (as_addr b)) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_addresses",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
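(* Illustrative sketch, hypothetical signature (not part of this
   interface): since ``len`` and ``length`` are ghost, run-time code must
   carry the length separately, typically as a machine integer refined to
   agree with the ghost length:

     val process (#a:Type0) (#rrel #rel:srel a)
       (b:mbuffer a rrel rel) (l:U32.t{U32.v l == length b})
       : HST.Stack unit
           (requires fun h -> live h b)
           (ensures  fun h0 _ h1 -> h0 == h1)

   The caller supplies the concrete length, while proofs keep referring to
   ``length b``. *)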
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
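(* Illustrative sketch (not part of this interface): with the trivial
   preorder

     let triv (a:Type0) : srel a = fun _ _ -> True

   ``compatible_sub b i len (triv a)`` holds for any buffer ``b`` whose
   ``rel`` is ``triv a``, because both implications in
   ``compatible_subseq_preorder`` then have trivially true conclusions. *)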
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent buffer's preorder
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers that have the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_addr_of_buffer (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_addr_of_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 61,
"end_line": 536,
"start_col": 2,
"start_line": 536
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x | val modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
]
let modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] = | false | null | true | modifies_liveness_insensitive_buffer loc_none l h h' x | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_buffer",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.address_liveness_insensitive_locs",
"LowStar.Monotonic.Buffer.live",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
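(* Illustrative sketch, not part of the original interface: the identity and
   idempotence laws above (together with their SMT patterns) discharge typical
   location-set simplifications; the hypothetical lemma below spells one out. *)
let loc_union_example (l1 l2: loc)
  : Lemma (loc_union (loc_union l1 loc_none) l2 == loc_union l1 l2)
  = loc_union_loc_none_r l1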
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
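(* Illustrative sketch, not part of the original interface: with the inclusion
   lemmas above, widening an inclusion to a union on the left is a one-step
   proof (the name below is ours). *)
let loc_includes_union_example (s1 s2: loc)
  : Lemma (loc_includes (loc_union s1 s2) s2)
  = loc_includes_refl s2;
    loc_includes_union_l s1 s2 s2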
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
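(* Illustrative sketch, not part of the original interface: disjointness from a
   union follows from disjointness from each operand, with symmetry flipping
   the orientation (the name below is ours). *)
let loc_disjoint_union_example (s s1 s2: loc)
  : Lemma (requires (loc_disjoint s s1 /\ loc_disjoint s s2))
          (ensures  (loc_disjoint (loc_union s1 s2) s))
  = loc_disjoint_union_r s s1 s2;
    loc_disjoint_sym s (loc_union s1 s2)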
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint; retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
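(* Illustrative sketch, not part of the original interface: this is the framing
   step a client takes (explicitly, or automatically via the patterns above) to
   carry a buffer's liveness and contents across an operation whose footprint
   is disjoint from it (the name below is ours). *)
let modifies_frame_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
  : Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
          (ensures  (live h' b /\ as_seq h b == as_seq h' b))
  = modifies_buffer_elim b p h h'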
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');]; | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_buffer_weak
(l: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[
SMTPatOr
[
[SMTPat (live h x); SMTPat (modifies l h h')];
[SMTPat (live h' x); SMTPat (modifies l h h')]
]
] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_buffer_weak | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.address_liveness_insensitive_locs
l /\ LowStar.Monotonic.Buffer.live h x)
(ensures LowStar.Monotonic.Buffer.live h' x)
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.live h x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
];
[
SMTPat (LowStar.Monotonic.Buffer.live h' x);
SMTPat (LowStar.Monotonic.Buffer.modifies l h h')
]
]
] | {
"end_col": 58,
"end_line": 1207,
"start_col": 4,
"start_line": 1207
} |
FStar.Pervasives.Lemma | val loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b) | val loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
let loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] = | false | null | true | loc_includes_union_l s1 s2 (loc_buffer b) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
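(* Illustrative sketch, not part of the original interface: a ghost predicate
   (hypothetical name) stating that every element of ``b`` in ``h`` equals ``x``,
   phrased with ``get``. *)
let example_all_elems_eq (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (x:a)
  : GTot Type0
  = forall (i:nat{i < length b}). get h b i == x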
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent's preorder (see ``compatible_sub`` above).
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
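(* Illustrative sketch (hypothetical name, not part of the interface): the
   ghost "first half" of a buffer, carved out with ``mgsub`` at the parent's
   own preorder ``rel``. *)
let example_gfirst_half (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : GTot (mbuffer a rrel rel)
  = mgsub rel b 0ul (U32.div (len b) 2ul)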
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
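(* Illustrative check (hypothetical name), assuming only the lemmas above: the
   contents of the "largest" sub-buffer coincide with the contents of the
   buffer itself. *)
let example_as_seq_whole_gsub (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel)
  : Lemma (as_seq h (mgsub rel b 0ul (len b)) == as_seq h b)
  = gsub_zero_length b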
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
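(* Illustrative check (hypothetical name): the monoid laws above let unions of
   locations be normalized; here the two rewriting steps are spelled out
   explicitly instead of relying on the SMT patterns. *)
let example_loc_union_norm (l: loc)
  : Lemma (loc_union loc_none (loc_union l l) == l)
  = loc_union_loc_none_l (loc_union l l);
    loc_union_idem l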
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
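(* Illustrative sketch (hypothetical name): a typical footprint for code that
   may allocate anywhere under a region ``r`` (e.g. the current stack frame)
   and also writes to a buffer ``b``. *)
let example_region_and_buffer_footprint
  (r: HS.rid) (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : GTot loc
  = loc_union (loc_all_regions_from false r) (loc_buffer b)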
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
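(* Illustrative check (hypothetical name): a buffer's footprint is included in
   any union that mentions it, proved from ``loc_includes_refl`` and
   ``loc_includes_union_l``. *)
let example_includes_union_left (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l: loc)
  : Lemma (loc_includes (loc_union (loc_buffer b) l) (loc_buffer b))
  = loc_includes_refl (loc_buffer b);
    loc_includes_union_l (loc_buffer b) l (loc_buffer b)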
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
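(* Illustrative check (hypothetical name): the whole-buffer location includes
   the range covering all of it, a direct use of
   ``loc_includes_loc_buffer_loc_buffer_from_to``. *)
let example_includes_full_range (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_buffer b) (loc_buffer_from_to b 0ul (len b)))
  = loc_includes_loc_buffer_loc_buffer_from_to b 0ul (len b)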
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
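(* Illustrative check (hypothetical name): combining the lemma above with the
   unfolding of ``loc_region_only``, a buffer's footprint is included in its
   own region taken in isolation. *)
let example_region_only_includes_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_region_only true (frameOf b)) (loc_buffer b))
  = loc_includes_region_buffer true (Set.singleton (frameOf b)) b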
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_union_l_buffer
(s1 s2: loc)
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
: Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))] | [] | LowStar.Monotonic.Buffer.loc_includes_union_l_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes s1 (LowStar.Monotonic.Buffer.loc_buffer b) \/
LowStar.Monotonic.Buffer.loc_includes s2 (LowStar.Monotonic.Buffer.loc_buffer b))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_buffer b))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union s1 s2)
(LowStar.Monotonic.Buffer.loc_buffer b))
] | {
"end_col": 45,
"end_line": 854,
"start_col": 4,
"start_line": 854
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x | val modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
]
let modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] = | false | null | true | modifies_liveness_insensitive_region_mreference loc_none l2 h h' x | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_mreference",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
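(* Illustrative sketch (hypothetical name): the trivial preorder, which relates
   any two sequences; it is compatible with any of its own slices in the sense
   above, and is the preorder LowStar.Buffer attaches to plain buffers. *)
let example_trivial_srel (a:Type0) : srel a = fun _ _ -> True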
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the parent's preorder (see ``compatible_sub`` above).
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds for their corresponding sets of
/// memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
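(* Illustrative sketch (not part of the original interface): how a client
   specification might use these list-based utilities. The names `b1`, `b2`
   and `example_spec` are hypothetical.

   val example_spec (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h:HS.mem)
     : Lemma (requires (all_live h [buf b1; buf b2] /\
                        all_disjoint [loc_buffer b1; loc_buffer b2]))
             (ensures  (live h b1 /\ live h b2))

   Since `all_live` and `all_disjoint` are marked `unfold` and reduce at
   typechecking time, the requires-clause above unfolds to the usual
   conjunction of `live` and `loc_disjoint` facts. *)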
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
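(* Illustrative sketch (hypothetical client-style specification, not part of
   this interface): a function that writes into `dst` while leaving any
   disjoint buffer `other` unchanged; the postcondition on `other` follows
   from `modifies_buffer_elim`.

   val fill_example (#a:Type0) (#rrel #rel:srel a)
     (dst other:mbuffer a rrel rel) (v:a)
     : HST.Stack unit
       (requires (fun h -> live h dst /\ live h other /\
                           loc_disjoint (loc_buffer dst) (loc_buffer other)))
       (ensures  (fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1 /\
                                 live h1 other /\
                                 as_seq h1 other == as_seq h0 other)) *)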
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
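(* Illustrative sketch (hypothetical reasoning step, not part of this
   interface): because `loc_addresses true r s` is address-liveness-
   insensitive, a modifies clause mentioning only such locations cannot
   deallocate anything, so liveness of any live buffer is preserved:

     modifies (loc_addresses true r s) h0 h1 /\ live h0 b ==> live h1 b

   This follows from `address_liveness_insensitive_addresses` together with
   `modifies_liveness_insensitive_buffer_weak` below. *)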
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]; | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_region_mreference_weak
(l2: loc)
(h h': HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))]
]
] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_mreference_weak | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperStack.mreference t pre
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (FStar.Monotonic.HyperStack.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (FStar.Monotonic.HyperStack.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h
(FStar.Monotonic.HyperStack.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h'
(FStar.Monotonic.HyperStack.frameOf x))
]
]
] | {
"end_col": 70,
"end_line": 1274,
"start_col": 4,
"start_line": 1274
} |
Prims.GTot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0 | let deref (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (x: mpointer a rrel rel) = | false | null | false | get h x 0 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mpointer",
"LowStar.Monotonic.Buffer.get"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
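(* Illustrative sketch (hypothetical ghost-level usage, not part of this
   interface): carving the middle two elements out of a four-element buffer.
   The names `b` and `example_mid` are hypothetical; reusing `rel` as the
   sub-buffer preorder is a choice, and lemmas such as `live_gsub` further
   require `compatible_sub`.

   let example_mid (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel { length b == 4 })
     : GTot (mbuffer a rrel rel)
     = mgsub rel b 1ul 2ul   // offset 1, length 2; 1 + 2 <= 4 holds

   By `len_gsub`, `length (example_mid b) == 2`, and by `as_seq_gsub`, its
   contents in any heap `h` are `Seq.slice (as_seq h b) 1 3`. *)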
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
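(* Illustrative sketch (hypothetical equalities, not part of this interface):
   the monoid laws let footprints be normalized, e.g. for any locations
   `l1` and `l2`,

     loc_union loc_none (loc_union l1 (loc_union l2 l1))
       == loc_union l1 (loc_union l2 l1)     // loc_union_loc_none_l
       == loc_union (loc_union l1 l2) l1     // loc_union_assoc
       == loc_union l1 l2                    // loc_union_comm + loc_union_idem_1

   so a modifies clause stated with a redundant or reordered union is
   interchangeable with its normalized form. *)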
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
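(* Illustrative sketch (hypothetical usage, not part of this interface):
   `loc_region_only` versus `loc_all_regions_from` for a region `r`
   (e.g. one created with HST.new_region, or a stack frame):

     loc_region_only false r        -- only the locations of region `r` itself
     loc_all_regions_from false r   -- `r` plus every region transitively
                                       extending it (e.g. nested frames)

   A function that may allocate both in `r` and in fresh sub-regions of `r`
   is typically specified with `modifies (loc_all_regions_from false r) h0 h1`. *)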
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the same inclusion holds for their
/// corresponding sets of memory locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds for their corresponding sets of
/// memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
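(* Illustrative sketch (hypothetical ghost-level fact, not part of this
   interface): splitting a buffer of length 8 into two non-overlapping halves
   yields disjoint footprints, by `loc_disjoint_gsub_buffer`:

     length b == 8 ==>
     loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                  (loc_buffer (mgsub rel b 4ul 4ul))

   since 0 + 4 <= 4, i.e. the first slice ends where the second begins. *)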
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
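(* Illustrative sketch, not part of the library interface: how a client
   specification might state that three buffers are pairwise disjoint. The name
   `example_pairwise_disjoint` is hypothetical. *)
let example_pairwise_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) : GTot Type0 =
  loc_pairwise_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]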
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
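(* Illustrative client-side sketch, not part of the library interface: a buffer
   disjoint from the modified set keeps both its liveness and its contents. The
   name `example_frame_read` is hypothetical; the proof is expected to follow
   automatically from the SMT patterns of modifies_buffer_elim above. *)
let example_frame_read (#a:Type0) (#rrel #rel:srel a)
  (b b':mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b /\
                     loc_disjoint (loc_buffer b) (loc_buffer b') /\
                     modifies (loc_buffer b') h h'))
          (ensures  (live h' b /\ as_seq h b == as_seq h' b))
  = ()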
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
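(* Illustrative sketch, not part of the library interface: weakening a modifies
   clause by enlarging its set of locations. The name `example_weaken_modifies`
   is hypothetical; the proof is spelled out with explicit lemma calls. *)
let example_weaken_modifies (l1 l2:loc) (h h':HS.mem)
  : Lemma (requires (modifies l2 h h'))
          (ensures  (modifies (loc_union l1 l2) h h'))
  = loc_includes_refl l2;
    loc_includes_union_l l1 l2 l2;
    modifies_loc_includes (loc_union l1 l2) h h' l2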
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
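(* Illustrative sketch, not part of the library interface: chaining two
   modifications with the same footprint; loc_union_idem collapses the union
   produced by modifies_trans. The name `example_modifies_chain` is hypothetical. *)
let example_modifies_chain (l:loc) (h1 h2 h3:HS.mem)
  : Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
          (ensures  (modifies l h1 h3))
  = modifies_trans l h1 h2 l h3;
    loc_union_idem l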
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if, either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
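(* Illustrative sketch, not part of the library interface: a location that is
   fresh between h and h' is disjoint from any location already allocated in h.
   The name `example_fresh_disjoint` is hypothetical; the proof is spelled out
   with explicit lemma calls. *)
let example_fresh_disjoint (l l':loc) (h h':HS.mem)
  : Lemma (requires (fresh_loc l h h' /\ loc_not_unused_in h `loc_includes` l'))
          (ensures  (loc_disjoint l l'))
  = loc_includes_refl l;
    loc_includes_refl l';
    unused_in_not_unused_in_disjoint_2 l l' l l' h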
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as just another new location.
 * However, the way the library is set up, a loc_region in any form cannot be
 * considered a fresh loc, so we provide a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
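(* Illustrative sketch, not part of the library interface: a zero-length
   sub-buffer is disjoint from any other buffer, via len_gsub and empty_disjoint.
   The name `example_empty_gsub_disjoint` is hypothetical. *)
let example_empty_gsub_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (i:U32.t)
  : Lemma (requires (U32.v i <= length b1))
          (ensures  (disjoint (mgsub rel1 b1 i 0ul) b2))
  = empty_disjoint (mgsub rel1 b1 i 0ul) b2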
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val deref : h: FStar.Monotonic.HyperStack.mem -> x: LowStar.Monotonic.Buffer.mpointer a rrel rel -> Prims.GTot a | [] | LowStar.Monotonic.Buffer.deref | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> x: LowStar.Monotonic.Buffer.mpointer a rrel rel -> Prims.GTot a | {
"end_col": 11,
"end_line": 1777,
"start_col": 2,
"start_line": 1777
} |
|
FStar.Pervasives.Lemma | val unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2' | val unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
]
let unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] = | false | null | true | loc_includes_trans (loc_unused_in h) l1 l1';
loc_includes_trans (loc_not_unused_in h) l2 l2';
loc_unused_in_not_unused_in_disjoint h;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2' | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"LowStar.Monotonic.Buffer.loc_unused_in",
"LowStar.Monotonic.Buffer.loc_not_unused_in",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_unused_in_not_unused_in_disjoint",
"LowStar.Monotonic.Buffer.loc_includes_trans",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
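(* Illustrative sketch, not part of the library interface: indexing into a
   sub-buffer is the same as indexing into the enclosing buffer at the shifted
   position. The name `example_index_gsub` is hypothetical; the proof is expected
   to follow from as_seq_gsub together with the indexing lemmas on Seq.slice,
   and may need an explicit call to Seq.lemma_index_slice. *)
let example_index_gsub (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i len:U32.t) (k:nat)
  : Lemma (requires (U32.v i + U32.v len <= length b /\ k < U32.v len))
          (ensures  (Seq.index (as_seq h (mgsub rel b i len)) k ==
                     Seq.index (as_seq h b) (U32.v i + k)))
  = ()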
/// Two live non-null buffers having the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
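(* Illustrative sketch, not part of the library interface: a small algebraic
   consequence of the monoid laws above, proved by rewriting with the unit and
   idempotence lemmas. The name `example_union_collapse` is hypothetical. *)
let example_union_collapse (l:loc)
  : Lemma (loc_union l (loc_union loc_none l) == l)
  = loc_union_loc_none_l l;
    loc_union_idem l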
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
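(* Illustrative sketch (not part of this interface): with the three
   wrappers above, a goal of the shape

     loc_includes (loc_union (loc_buffer b) l) (loc_buffer b)

   for some hypothetical buffer `b` and location `l` is discharged
   automatically by the SMT patterns together with loc_includes_refl,
   without calling loc_includes_union_l by hand. *)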
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
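(* Illustrative sketch (not part of this interface): splitting a
   hypothetical buffer `b` of length 8 into two halves yields disjoint
   footprints, since the index ranges do not overlap:

     let front = mgsub rel b 0ul 4ul in
     let back  = mgsub rel b 4ul 4ul in
     // loc_disjoint (loc_buffer front) (loc_buffer back) follows from
     // loc_disjoint_gsub_buffer, because 0 + 4 <= 4
*)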
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
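(* Illustrative sketch (not part of this interface): a client signature
   over three hypothetical buffers b1, b2, b3 could state its liveness
   and separation preconditions as

     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]

   which the normalizer unfolds at typechecking time into the expected
   conjunction of `live` and pairwise `loc_disjoint` facts. *)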
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
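(* Illustrative sketch (not part of this interface): the typical framing
   argument. Suppose a hypothetical function has postcondition
   `modifies (loc_buffer b1) h0 h1`. For any other buffer `b2` with
   `loc_disjoint (loc_buffer b1) (loc_buffer b2)` and `live h0 b2`,
   modifies_buffer_elim (triggered by its SMT patterns) yields
   `live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2`. *)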
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory
/// locations is trivially modified (in particular, the empty set
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
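(* Illustrative note (not part of this interface): since `loc_buffer b`
   is address-liveness-insensitive, a clause `modifies (loc_buffer b) h0 h1`
   cannot deallocate anything: for any buffer `x` with `live h0 x`,
   modifies_liveness_insensitive_buffer_weak (below) yields `live h1 x`,
   even without a disjointness hypothesis between `x` and `b`. *)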
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
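(* Illustrative sketch (not part of this interface): chaining two steps
   over the same footprint. If `modifies l h0 h1` and `modifies l h1 h2`,
   then modifies_trans gives `modifies (loc_union l l) h0 h2`, which
   loc_union_idem rewrites to `modifies l h0 h2`; modifies_trans_linear
   captures this common case directly. *)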
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
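(* Illustrative sketch (not part of this interface): this lemma backs the
   usual proof shape of a Stack function that uses temporary state.
   Schematically, for a hypothetical function f:

     let f () : HST.Stack unit ... =
       HST.push_frame ();   // h0 -> h1, with HS.fresh_frame h0 h1
       // ... allocate and mutate locals in the new frame, plus the
       //     caller-visible footprint s, so that
       //     modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2
       HST.pop_frame ()     // h2 -> h3, with HS.popped h2 h3

   after which `modifies s h0 h3` holds for the caller. *)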
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
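(* Illustrative sketch (not part of this interface): a typical use of
   these two sets. If a hypothetical buffer `b` is unused in a state h0
   and `l` is any location live in h0, then `loc_unused_in h0` includes
   `loc_addr_of_buffer b` (hence `loc_buffer b`) and `loc_not_unused_in h0`
   includes `l`; unused_in_not_unused_in_disjoint_2 (below) then yields
   `loc_disjoint (loc_buffer b) l`, i.e. freshly allocated buffers are
   separated from everything that already existed. *)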
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' )) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unused_in_not_unused_in_disjoint_2 (l1 l2 l1' l2': loc) (h: HS.mem)
: Lemma
(requires
((loc_unused_in h) `loc_includes` l1 /\ (loc_not_unused_in h) `loc_includes` l2 /\
l1 `loc_includes` l1' /\ l2 `loc_includes` l2'))
(ensures (loc_disjoint l1' l2'))
[
SMTPat (loc_disjoint l1' l2');
SMTPat ((loc_unused_in h) `loc_includes` l1);
SMTPat ((loc_not_unused_in h) `loc_includes` l2)
] | [] | LowStar.Monotonic.Buffer.unused_in_not_unused_in_disjoint_2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l1: LowStar.Monotonic.Buffer.loc ->
l2: LowStar.Monotonic.Buffer.loc ->
l1': LowStar.Monotonic.Buffer.loc ->
l2': LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_unused_in h) l1 /\
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_not_unused_in h) l2 /\
LowStar.Monotonic.Buffer.loc_includes l1 l1' /\ LowStar.Monotonic.Buffer.loc_includes l2 l2'
)
(ensures LowStar.Monotonic.Buffer.loc_disjoint l1' l2')
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint l1' l2');
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_unused_in h) l1);
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_not_unused_in h)
l2)
] | {
"end_col": 71,
"end_line": 1654,
"start_col": 2,
"start_line": 1651
} |
Prims.GTot | val g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h | val g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem
let g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem = | false | null | false | g_upd_seq b (Seq.upd (as_seq h b) i v) h | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThan",
"LowStar.Monotonic.Buffer.length",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.g_upd_seq",
"FStar.Seq.Base.upd",
"LowStar.Monotonic.Buffer.as_seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
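(* Illustrative examples of such preorders, shown here only for intuition
   (cf. LowStar.Buffer and LowStar.ImmutableBuffer in the same namespace):
     fun _ _ -> True                     // trivial preorder: any mutation allowed
     fun s1 s2 -> Seq.equal s1 s2        // contents may never change (immutability)
*)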
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility".
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
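(* Illustrative sketch (not part of this interface): for a hypothetical
   buffer `b` of length 4 and any memory `h`,

     as_seq h (mgsub rel b 1ul 2ul) == Seq.slice (as_seq h b) 1 3

   i.e. the sub-buffer at offset 1 of length 2 views exactly the middle
   two elements of `b`. *)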
/// Two live non-null buffers with the same region and address have
/// the same element type and the same initial preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
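(* Illustrative sketch (not part of this interface): these algebraic laws
   let footprints stated in different shapes normalize to each other;
   e.g. for a hypothetical location `l`,

     loc_union l (loc_union loc_none l) == l

   by loc_union_loc_none_l and loc_union_idem, so a modifies clause over
   either side is interchangeable with the other. *)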
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
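(* Illustrative note (not part of this interface): the difference matters
   for stack frames. If `r` is the current tip, `loc_region_only false r`
   covers just the frame `r` itself, whereas `loc_all_regions_from false r`
   also covers every region that later extends `r` (e.g. frames pushed on
   top of it); the latter is the footprint typically used when reasoning
   about callees that push and pop their own frames. *)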
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
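(* A minimal usage sketch, not part of the library: if a footprint ``l``
   already covers a buffer, it also covers any sub-buffer carved out of it,
   by a direct appeal to the lemma above. The name is hypothetical. *)
let example_footprint_covers_sub
  (l:loc) (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
  : Lemma (requires (U32.v i + U32.v len <= length b /\ loc_includes l (loc_buffer b)))
          (ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
  = loc_includes_gsub_buffer_r l b i len sub_rel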
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses of
/// ``b``'s region ``r``, then the set of memory locations corresponding to
/// ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// the set of memory locations corresponding to ``s1`` includes the one
/// corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
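(* A minimal usage sketch, not part of the library: combining symmetry and
   compatibility with union to discharge a disjointness goal stated "the
   other way around". The name is hypothetical. *)
let example_disjoint_sym_union (l l1 l2: loc)
  : Lemma (requires (loc_disjoint l1 l /\ loc_disjoint l l2))
          (ensures (loc_disjoint (loc_union l1 l2) l))
  = loc_disjoint_sym l1 l;
    loc_disjoint_union_r l l1 l2;
    loc_disjoint_sym l (loc_union l1 l2)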
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
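(* A minimal usage sketch, not part of the library: splitting a buffer at
   index ``i`` yields two sub-buffers with disjoint footprints. The name is
   hypothetical. *)
let example_split_at (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t) (sub_rel:srel a)
  : Lemma (requires (U32.v i <= length b))
          (ensures (loc_disjoint (loc_buffer (mgsub sub_rel b 0ul i))
                                 (loc_buffer (mgsub sub_rel b i (U32.sub (len b) i)))))
  = loc_disjoint_gsub_buffer b 0ul i sub_rel i (U32.sub (len b) i) sub_rel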
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
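(* A minimal usage sketch, not part of the library: buffers allocated in
   different regions have disjoint footprints, obtained by going through
   their address sets. The name is hypothetical. *)
let example_disjoint_different_regions
  (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
  : Lemma (requires (frameOf b1 <> frameOf b2))
          (ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
  = loc_disjoint_addresses true true (frameOf b1) (frameOf b2)
      (Set.singleton (as_addr b1)) (Set.singleton (as_addr b2));
    loc_includes_addresses_buffer' b1;
    loc_includes_addresses_buffer' b2;
    loc_disjoint_includes
      (loc_addresses true (frameOf b1) (Set.singleton (as_addr b1)))
      (loc_addresses true (frameOf b2) (Set.singleton (as_addr b2)))
      (loc_buffer b1) (loc_buffer b2)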
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
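(* A minimal usage sketch, not part of the library: stating pairwise
   disjointness of three buffer footprints with ``all_disjoint``. The name
   is hypothetical. *)
let example_three_way_disjoint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) : GTot Type0
  = all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]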
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
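(* A minimal usage sketch, not part of the library: the standard framing
   step — a buffer disjoint from the modified footprint keeps both its
   liveness and its contents. The name is hypothetical. *)
let example_frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (l:loc) (h0 h1:HS.mem)
  : Lemma (requires (live h0 b /\ loc_disjoint (loc_buffer b) l /\ modifies l h0 h1))
          (ensures (live h1 b /\ as_seq h0 b == as_seq h1 b))
  = modifies_buffer_elim b l h0 h1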
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially modified (in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
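(* A minimal usage sketch, not part of the library: a write whose footprint
   is just a buffer only touches liveness-insensitive locations, so it
   cannot invalidate the liveness of any other live buffer, disjoint or
   not. The name is hypothetical. *)
let example_write_preserves_liveness
  (#a:Type0) (#rrel #rel:srel a) (#a':Type0) (#rrel' #rel':srel a')
  (written:mbuffer a rrel rel) (other:mbuffer a' rrel' rel') (h0 h1:HS.mem)
  : Lemma (requires (modifies (loc_buffer written) h0 h1 /\ live h0 other))
          (ensures (live h1 other))
  = address_liveness_insensitive_buffer written;
    modifies_liveness_insensitive_buffer_weak (loc_buffer written) h0 h1 other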
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
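(* A minimal usage sketch, not part of the library: sequencing two
   operations with footprints ``l1`` and ``l2`` yields a single modifies
   clause over their union. The name is hypothetical. *)
let example_compose_modifies (l1 l2:loc) (h0 h1 h2:HS.mem)
  : Lemma (requires (modifies l1 h0 h1 /\ modifies l2 h1 h2))
          (ensures (modifies (loc_union l1 l2) h0 h2))
  = modifies_trans l1 h0 h1 l2 h2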
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removed the SMT pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* A generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
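(* A minimal usage sketch, not part of the library: anything freshly
   allocated between ``h0`` and ``h1`` is disjoint from every location
   already present in ``h0``. The name is hypothetical. *)
let example_fresh_is_disjoint (l_fresh l_old:loc) (h0 h1:HS.mem)
  : Lemma (requires (fresh_loc l_fresh h0 h1 /\ l_old `loc_in` h0))
          (ensures (loc_disjoint l_fresh l_old))
  = loc_includes_refl (loc_unused_in h0);
    loc_includes_refl (loc_not_unused_in h0);
    unused_in_not_unused_in_disjoint_2
      (loc_unused_in h0) (loc_not_unused_in h0) l_fresh l_old h0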
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
 * AR: this lemma frames the modifies clause across a fresh frame.
 * One way to do it would have been to reuse the lemma modifies_remove_new_locs,
 * treating the fresh frame as another new location.
 * However, the way the library is set up, loc_region in any form cannot be
 * considered a fresh loc, so we provide a special lemma for fresh_frame.
 *)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test is ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
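(* A minimal usage sketch, not part of the library: reading the first cell
   of a non-empty buffer. The name is hypothetical. *)
let example_read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack a (requires (fun h -> live h b /\ 0 < length b))
                (ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) 0))
  = index b 0ul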
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire contents of the buffer ``b`` in
/// heap ``h`` to correspond to the sequence ``s``.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer ``b`` in heap ``h`` at location
/// ``i``, writing ``v`` there. This is the spec analog of the stateful
/// update ``upd`` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b}) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val g_upd
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(i: nat{i < length b})
(v: a)
(h: HS.mem{live h b})
: GTot HS.mem | [] | LowStar.Monotonic.Buffer.g_upd | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: Prims.nat{i < LowStar.Monotonic.Buffer.length b} ->
v: a ->
h: FStar.Monotonic.HyperStack.mem{LowStar.Monotonic.Buffer.live h b}
-> Prims.GTot FStar.Monotonic.HyperStack.mem | {
"end_col": 44,
"end_line": 1874,
"start_col": 4,
"start_line": 1874
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) | let alloc_partial_post_mem_common
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h0 h1: HS.mem)
(s: Seq.seq a)
= | false | null | false | (g_is_null b /\ h0 == h1) \/ ((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"Prims.l_or",
"Prims.l_and",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.eq2",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.alloc_post_mem_common",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
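(* A minimal usage sketch, not part of the library: using ``get`` in a
   specification, here to state that the first two cells of a buffer are
   equal in a given heap. The name is hypothetical. *)
let example_first_two_equal (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b >= 2}) : GTot Type0
  = get h b 0 == get h b 1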
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
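(*
 * Illustrative sketch, not part of this interface: carving out the second
 * half of a buffer at the same preorder. The name ``second_half`` is
 * hypothetical; we assume ``b`` splits evenly at index ``n``.
 *
 *   let second_half (#a:Type0) (#rrel #rel:srel a)
 *     (b:mbuffer a rrel rel) (n:U32.t{U32.v n + U32.v n == length b})
 *     : GTot (mbuffer a rrel rel)
 *     = mgsub rel b n n
 *
 * By ``as_seq_gsub``, in any heap ``h``, ``as_seq h (second_half b n)`` is
 * ``Seq.slice (as_seq h b) (U32.v n) (U32.v n + U32.v n)``.
 *)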
/// Two live non-null buffers that have the same region and address have
/// elements of the same type and the same ``rrel`` preorder.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
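(*
 * Illustrative sketch, not part of this interface: a composite footprint for
 * two buffers, of the kind that typically appears in a ``modifies`` clause
 * (see below). The name ``footprint`` is hypothetical.
 *
 *   let footprint (#a:Type0) (#rrel1 #rel1:srel a) (#rrel2 #rel2:srel a)
 *     (b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
 *     : GTot loc
 *     = loc_union (loc_buffer b1) (loc_addr_of_buffer b2)
 *
 * ``loc_buffer b1`` covers only the contents of ``b1``, whereas
 * ``loc_addr_of_buffer b2`` also covers the liveness of ``b2``'s address,
 * e.g. to account for ``b2`` being freed.
 *)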
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then the
/// set of memory locations corresponding to ``s1`` includes the one corresponding to ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
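(*
 * Illustrative sketch, not part of this interface: for three buffers
 * ``b1``, ``b2``, ``b3`` (hypothetical names),
 *
 *   all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
 *
 * unfolds at typechecking time to the conjunction of the three pairwise
 * ``loc_disjoint`` facts, and
 *
 *   loc_union_l [loc_buffer b1; loc_buffer b2; loc_buffer b3]
 *
 * unfolds to
 * ``loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_union (loc_buffer b3) loc_none))``.
 *)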
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
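(*
 * Illustrative sketch, not part of this interface: a typical stateful
 * specification phrased with ``modifies``. The name ``copy_first`` and its
 * behaviour are hypothetical.
 *
 *   val copy_first (#a:Type0) (#rrel #rel:srel a)
 *     (dst:mbuffer a rrel rel{length dst > 0})
 *     (src:mbuffer a rrel rel{length src > 0})
 *     : HST.Stack unit
 *       (requires fun h -> live h dst /\ live h src /\
 *                       loc_disjoint (loc_buffer dst) (loc_buffer src))
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1 /\ live h1 dst /\
 *                             get h1 dst 0 == get h0 src 0)
 *
 * Any location disjoint from ``loc_buffer dst``, in particular ``src``, is
 * preserved across a call, by the elimination lemmas below. (An actual
 * implementation would additionally need ``rel`` to allow the update.)
 *)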
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations is
/// (vacuously) modified (in particular, the empty set, ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
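(*
 * Illustrative sketch, not part of this interface: chaining two modifies
 * facts with ``modifies_trans``. The name ``compose_modifies`` is hypothetical.
 *
 *   let compose_modifies (#a:Type0) (#rrel #rel:srel a)
 *     (b1 b2:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
 *     : Lemma
 *       (requires modifies (loc_buffer b1) h0 h1 /\ modifies (loc_buffer b2) h1 h2)
 *       (ensures  modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h2)
 *     = modifies_trans (loc_buffer b1) h0 h1 (loc_buffer b2) h2
 *
 * The resulting union can then be weakened further with ``modifies_loc_includes``.
 *)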
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
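(*
 * Illustrative sketch, not part of this interface: the stack discipline as
 * seen by clients. The names below are hypothetical; ``f`` is assumed to
 * modify only ``b``. In practice the surrounding SMT patterns (and, if
 * needed, an explicit call to ``modifies_fresh_frame_popped``) discharge the
 * frame-related obligations.
 *
 *   let with_frame (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
 *     (f:unit -> HST.Stack unit (requires fun h -> live h b)
 *                            (ensures fun h0 _ h1 -> modifies (loc_buffer b) h0 h1))
 *     : HST.Stack unit
 *       (requires fun h -> live h b)
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1)
 *     = HST.push_frame ();
 *       f ();
 *       HST.pop_frame ()
 *)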
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Locations that are unused in the initial memory can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location.
* However, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc,
* so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
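(*
 * Illustrative sketch, not part of this interface: taking the tail of a
 * buffer in stateful code. We assume the trivial preorder of
 * ``LowStar.Buffer`` (a wrapper over this module), for which
 * ``compatible_sub`` always holds; ``tail`` is a hypothetical name.
 *
 *   let tail (#a:Type0) (b:LowStar.Buffer.buffer a{length b > 0})
 *     : HST.Stack (LowStar.Buffer.buffer a)
 *       (requires fun h -> live h b)
 *       (ensures  fun h y h' -> h == h')
 *     = moffset (LowStar.Buffer.trivial_preorder a) b 1ul
 *)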
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire contents of buffer ``b`` in
/// heap ``h`` to the sequence ``s``.
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
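(*
 * Illustrative sketch, not part of this interface: reading and writing a
 * cell. We assume the trivial preorder of ``LowStar.Buffer`` so that the
 * ``rel``-compatibility precondition of ``upd`` holds trivially;
 * ``incr_first`` is a hypothetical name.
 *
 *   let incr_first (b:LowStar.Buffer.buffer UInt32.t{length b > 0})
 *     : HST.Stack unit
 *       (requires fun h -> live h b /\ U32.v (get h b 0) < 100)
 *       (ensures  fun h0 _ h1 -> modifies (loc_buffer b) h0 h1 /\
 *                             U32.v (get h1 b 0) == U32.v (get h0 b 0) + 1)
 *     = let x = index b 0ul in
 *       upd b 0ul (U32.add x 1ul)
 *)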
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them,
* provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier, you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
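(*
 * Illustrative sketch, not part of this interface: witnessing a predicate
 * under a hypothetical preorder that only relates equal sequences
 * (``eq_rel a = fun s1 s2 -> s1 == s2``), so that every predicate is stable.
 *
 *   let remember (#a:Type0) (b:mbuffer a (eq_rel a) (eq_rel a)) (p:spred a)
 *     : HST.ST unit
 *       (requires fun h -> p (as_seq h b))
 *       (ensures  fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p)
 *     = witness_p b p
 *
 * Later, ``recall_p b p`` re-establishes ``p (as_seq h b)`` in any state in
 * which ``b`` is live or recallable.
 *)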
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free`` d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
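(* Illustrative sketch (hypothetical name, not part of this interface): a thin
   wrapper around free that keeps only its modifies clause, to emphasise that
   deallocation touches exactly the allocation unit of b. *)
let free_modifies_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.ST unit
    (requires (fun h0 -> live h0 b /\ freeable b))
    (ensures  (fun h0 _ h1 -> modifies (loc_addr_of_buffer b) h0 h1))
  = free b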
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
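(* Illustrative sketch (hypothetical name, not part of this interface):
   unpacking the common allocation postcondition -- it guarantees liveness in
   the new heap, freshness w.r.t. the old heap, an empty footprint for the
   allocation itself, and the initial contents. *)
let alloc_post_mem_common_elim (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
  : Lemma
    (requires alloc_post_mem_common b h0 h1 s)
    (ensures  live h1 b /\ b `unused_in` h0 /\ modifies loc_none h0 h1 /\ as_seq h1 b == s)
  = ()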
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val alloc_partial_post_mem_common : b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | [] | LowStar.Monotonic.Buffer.alloc_partial_post_mem_common | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: FStar.Seq.Base.seq a
-> Prims.logical | {
"end_col": 60,
"end_line": 2107,
"start_col": 4,
"start_line": 2106
} |
|
FStar.Pervasives.Lemma | val freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2 | val freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
let freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] = | false | null | true | freeable_disjoint b1 b2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.freeable_disjoint",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.freeable",
"Prims.b2t",
"Prims.op_GreaterThan",
"LowStar.Monotonic.Buffer.length",
"LowStar.Monotonic.Buffer.disjoint",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_addr_of_buffer",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
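(* Illustrative sketch (hypothetical names, not part of this interface): the
   trivial preorder, which relates any two sequences, is compatible with
   itself at every slice; this is why sub-buffers of plain mutable buffers
   come with no extra proof obligations. *)
unfold let trivial_srel (a:Type0) : srel a = fun _ _ -> True

let trivial_srel_compatible (a:Type0) (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  : Lemma (compatible_subseq_preorder len (trivial_srel a) i j (trivial_srel a))
  = ()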
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
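(* Illustrative sketch (hypothetical name, not part of this interface): the
   collapse lemma above, instantiated with a single preorder, gives the usual
   arithmetic-on-offsets reading of nested sub-buffers. *)
let gsub_gsub_same_rel (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i1 len1 i2 len2:U32.t)
  : Lemma
    (requires (U32.v i1 + U32.v len1 <= length b /\ U32.v i2 + U32.v len2 <= U32.v len1))
    (ensures  (mgsub rel (mgsub rel b i1 len1) i2 len2 == mgsub rel b (i1 `U32.add` i2) len2))
  = gsub_gsub b i1 len1 rel i2 len2 rel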
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
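(* Illustrative sketch (hypothetical name, not part of this interface): the
   monoid laws above let unions be freely reassociated and deduplicated; a
   small derived equation as an example. *)
let loc_union_dedup (s1 s2: loc)
  : Lemma (loc_union (loc_union s1 s2) (loc_union s1 s2) == loc_union s1 s2)
  = loc_union_idem (loc_union s1 s2)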
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
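(* Illustrative sketch (hypothetical name, not part of this interface): a
   typical inclusion chain -- any union covering a buffer also covers each of
   its sub-buffers. *)
let loc_includes_union_gsub
  (l1 l2:loc) (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
  : Lemma
    (requires (U32.v i + U32.v len <= length b /\ loc_includes l1 (loc_buffer b)))
    (ensures  (loc_includes (loc_union l1 l2) (loc_buffer (mgsub sub_rel b i len))))
  = loc_includes_gsub_buffer_r l1 b i len sub_rel;
    loc_includes_union_l l1 l2 (loc_buffer (mgsub sub_rel b i len))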
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
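(* Illustrative sketch (hypothetical name, not part of this interface): the
   typical framing step -- an update whose footprint is some buffer b2
   preserves the liveness and contents of any buffer b1 disjoint from it. *)
let frame_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h h':HS.mem)
  : Lemma
    (requires (loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
               live h b1 /\ modifies (loc_buffer b2) h h'))
    (ensures  (live h' b1 /\ as_seq h' b1 == as_seq h b1))
  = modifies_buffer_elim b1 (loc_buffer b2) h h'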
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
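(* Illustrative sketch (hypothetical name, not part of this interface):
   chaining two updates to the same buffer -- transitivity yields a union
   footprint, which idempotence collapses back to loc_buffer b. *)
let modifies_buffer_trans (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h1 h2 h3:HS.mem)
  : Lemma
    (requires (modifies (loc_buffer b) h1 h2 /\ modifies (loc_buffer b) h2 h3))
    (ensures  (modifies (loc_buffer b) h1 h3))
  = modifies_trans (loc_buffer b) h1 h2 (loc_buffer b) h3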
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma frames the modifies clause across a fresh frame.
* One way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location.
* However, the way the library is set up, loc_region in any form cannot be
* considered a fresh loc, so we have a special lemma for fresh_frame.
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
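(* Example (an illustrative client sketch, not part of this interface; the name
   read_ptr is hypothetical): ``deref`` keeps pointer specifications concise.

   let read_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
     : HST.Stack a (requires (fun h -> live h p))
                   (ensures (fun h0 v h1 -> h0 == h1 /\ v == deref h0 p))
   = index p 0ul

   Here ``index`` is the stateful read declared further below; since an
   ``mpointer`` has length 1, the bounds obligation ``U32.v 0ul < length p``
   is immediate. *)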
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
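(* Example (an illustrative client sketch, not part of this interface; the names
   trivial_rel and second_half are hypothetical): with a trivial preorder,
   ``compatible_sub`` holds for any slice, so carving out the second half of a
   buffer whose length is ``U32.v len2 + U32.v len2`` is simply

   let trivial_rel (a:Type0) : srel a = fun _ _ -> True

   let second_half (#a:Type0) (b:mbuffer a (trivial_rel a) (trivial_rel a))
     (len2:U32.t{U32.v len2 + U32.v len2 == length b})
     : HST.Stack (mbuffer a (trivial_rel a) (trivial_rel a))
       (requires (fun h -> live h b))
       (ensures (fun h y h' -> h == h' /\ y == mgsub (trivial_rel a) b len2 len2))
   = msub (trivial_rel a) b len2 (Ghost.hide len2)

   which KaRaMeL extracts to the pointer arithmetic ``b + len2``. *)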
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
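(* Example (an illustrative client sketch, not part of this interface; incr_first
   is hypothetical, and trivial_rel is the trivial preorder sketched after msub
   above): a read-modify-write of the first cell, for which the
   ``rel (as_seq h b) (Seq.upd ...)`` obligation of ``upd`` is vacuous.

   let incr_first (b:mbuffer U32.t (trivial_rel U32.t) (trivial_rel U32.t))
     : HST.Stack unit
       (requires (fun h -> live h b /\ length b > 0))
       (ensures (fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1 /\
                                as_seq h1 b ==
                                Seq.upd (as_seq h0 b) 0 (U32.add_mod (get h0 b 0) 1ul)))
   = let x = index b 0ul in
     upd b 0ul (U32.add_mod x 1ul)

   A client working with a non-trivial ``rel`` must instead prove that the
   updated sequence is still related to the old one. *)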
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones).
* This is not a fundamental limitation, but it needs some tweaks to the underlying state model.
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
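(* Example (an illustrative sketch, not part of this interface; counter_rel is a
   hypothetical client-side preorder): with

   let counter_rel : srel U32.t =
     fun s1 s2 -> Seq.length s1 == Seq.length s2 /\
               (Seq.length s1 > 0 ==> U32.v (Seq.index s1 0) <= U32.v (Seq.index s2 0))

   stating that the first cell never decreases, the predicate
   ``fun s -> Seq.length s > 0 /\ U32.v (Seq.index s 0) >= 1`` is ``stable_on``
   counter_rel. A client that has just written a non-zero value into cell 0 can
   call ``witness_p`` once and later ``recall_p`` the fact after arbitrary
   preorder-respecting updates, without threading it through every intermediate
   specification. *)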
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
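(* Example (an illustrative client sketch, not part of this interface; dispose is
   a hypothetical name):

   let dispose (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.ST unit
       (requires (fun h0 -> live h0 b /\ freeable b))
       (ensures (fun h0 _ h1 -> modifies (loc_addr_of_buffer b) h0 h1))
   = free b

   Note that the footprint is ``loc_addr_of_buffer b`` rather than
   ``loc_buffer b``: deallocation affects the whole allocation unit, including
   its liveness. *)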
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val freeable_disjoint'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
: Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)] | [] | LowStar.Monotonic.Buffer.freeable_disjoint' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.freeable b1 /\ LowStar.Monotonic.Buffer.length b2 > 0 /\
LowStar.Monotonic.Buffer.disjoint b1 b2)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_addr_of_buffer b1)
(LowStar.Monotonic.Buffer.loc_addr_of_buffer b2))
[
SMTPat (LowStar.Monotonic.Buffer.freeable b1);
SMTPat (LowStar.Monotonic.Buffer.disjoint b1 b2)
] | {
"end_col": 27,
"end_line": 2052,
"start_col": 4,
"start_line": 2052
} |
FStar.HyperStack.ST.ST | val mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
= mgcmalloc_of_list r init | val mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init)))
let mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) = | true | null | false | mgcmalloc_of_list r init | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"Prims.list",
"LowStar.Monotonic.Buffer.mgcmalloc_of_list",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.Pervasives.normalize_term",
"Prims.nat",
"FStar.List.Tot.Base.length",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.gcmalloc_of_list_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Properties.seq_of_list"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
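(* Example (an illustrative sketch, not part of this interface; swapped is a
   hypothetical client-side predicate): specifications typically constrain
   individual cells with ``get``, e.g. a stateful swap of the first two cells
   would promise

   let swapped (#a:Type0) (#rrel #rel:srel a) (h0 h1:HS.mem)
     (b:mbuffer a rrel rel{length b >= 2})
     : GTot Type0
   = get h1 b 0 == get h0 b 1 /\ get h1 b 1 == get h0 b 0

   in its postcondition, together with a ``modifies`` clause over
   ``loc_buffer b`` (defined below). *)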
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility".
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers,
/// but we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same preorders).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
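(* Example (an illustrative note, not part of this interface): the monoid laws
   above let clients normalize footprints without manual rewriting. For any
   location ``l``,

   loc_union l (loc_union loc_none l) == l

   follows from loc_union_loc_none_l (collapsing the inner union to ``l``) and
   loc_union_idem; the SMT patterns attached to these lemmas usually perform
   such simplifications automatically during proofs. *)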
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the same inclusion holds
/// between their corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the same inclusion holds between their corresponding sets of
/// memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
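(*
/// Illustrative usage sketch (hypothetical name ``union_includes_left``;
/// assumes ``loc_includes_refl`` and ``loc_includes_union_l`` from earlier in
/// this interface): a buffer's footprint is included in any union containing it.
let union_includes_left (#a:Type0) (#rrel #rel:srel a) (b1 b2:mbuffer a rrel rel)
  :Lemma (loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) (loc_buffer b1))
  = loc_includes_refl (loc_buffer b1);
    loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1)
*)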
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
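(*
/// Illustrative usage sketch (hypothetical name ``split_disjoint``): splitting
/// a buffer at index ``i`` yields two disjoint sub-buffers; the SMT patterns
/// of ``loc_disjoint_gsub_buffer`` above discharge the proof.
let split_disjoint (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
  :Lemma (requires (U32.v i <= length b))
         (ensures (loc_disjoint (loc_buffer (mgsub rel b 0ul i))
                                (loc_buffer (mgsub rel b i (U32.sub (len b) i)))))
  = ()
*)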
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
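(*
/// Illustrative usage sketch (hypothetical name ``example_pre``): a typical
/// precondition for a function over three buffers, written with the helpers
/// above; typechecking unfolds it into the expected conjunction of ``live``
/// and pairwise ``loc_disjoint`` facts.
unfold
let example_pre (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b1 b2 b3:mbuffer a rrel rel) :Type0
  = all_live h [buf b1; buf b2; buf b3] /\
    all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
*)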
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
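(*
/// Illustrative usage sketch (hypothetical name ``frame_disjoint_buffer``):
/// the usual framing principle, obtained automatically from the elimination
/// lemma above together with ``loc_disjoint_sym'``.
let frame_disjoint_buffer (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
  :Lemma (requires (live h b2 /\ loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                    modifies (loc_buffer b1) h h'))
         (ensures (live h' b2 /\ as_seq h b2 == as_seq h' b2))
  = ()
*)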
/// If the memory state does not change, then ``modifies s h h`` holds for
/// any set ``s`` of memory locations (in particular, for the empty set
/// ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
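(*
/// Illustrative usage sketch (hypothetical name ``modifies_same_trans``): two
/// successive steps that modify the same footprint collapse into a single
/// modifies clause, thanks to ``modifies_trans_linear`` and its SMT pattern.
let modifies_same_trans (l:loc) (h1 h2 h3:HS.mem)
  :Lemma (requires (modifies l h1 h2 /\ modifies l h2 h3))
         (ensures (modifies l h1 h3))
  = ()
*)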
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
* however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
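(*
/// Illustrative usage sketch (hypothetical names ``triv`` and ``incr_first``):
/// a read-modify-write on the first cell of a buffer.  Under the trivial
/// preorder, the ``rel``-compatibility precondition of ``upd`` is vacuous.
let triv (a:Type0) :srel a = fun _ _ -> True
let incr_first (b:mbuffer U32.t (triv U32.t) (triv U32.t))
  :HST.Stack unit (requires (fun h -> live h b /\ length b > 0))
                  (ensures (fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1))
  = let x = index b 0ul in
    upd b 0ul (U32.add_mod x 1ul)
*)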
/// ``recallable b`` holds for buffers whose liveness can be re-established
/// at any time with ``recall`` below (e.g. buffers allocated with
/// ``mgcmalloc``); ``region_lifetime_buf b`` instead ties the liveness of
/// ``b`` to that of its region.
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
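(*
/// Illustrative usage sketch (hypothetical names ``imm`` and
/// ``witness_contents``): with a preorder that rules out any change, every
/// sequence predicate is stable, so the exact contents of a buffer can be
/// witnessed once and recovered later with ``recall_p``.
let imm (a:Type0) :srel a = fun s1 s2 -> s1 == s2
let witness_contents (#a:Type0) (b:mbuffer a (imm a) (imm a)) (s:Seq.seq a)
  :HST.ST unit (requires (fun h -> live h b /\ as_seq h b == s))
               (ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` (fun s' -> s' == s)))
  = witness_p b (fun s' -> s' == s)
*)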
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be freed with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postconditions, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len
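(*
/// Illustrative usage sketch (hypothetical name ``with_temp``; ``triv`` is the
/// trivial preorder from the sketch after ``upd`` above): allocate a
/// hand-managed buffer in an eternal region, use it, and free it.
let with_temp (r:HS.rid{HST.is_eternal_region r})
  :HST.ST U32.t (requires (fun _ -> True)) (ensures (fun _ _ _ -> True))
  = let b = mmalloc #U32.t #(triv U32.t) r 0ul 8ul in
    upd b 0ul 42ul;
    let x = index b 0ul in
    free b;
    x
*)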
/// ``alloca init len`` allocates a buffer of some positive length ``len``
/// in the current stack frame. Every cell of this buffer will have
/// initial contents ``init``. Such a buffer cannot be freed
/// individually, but is automatically freed as soon as its stack
/// frame is deallocated by ``HST.pop_frame``.
unfold let alloca_pre (len:U32.t) = U32.v len > 0
(*
* See the Allocation comment above when changing the spec
*)
val malloca (#a:Type0) (#rrel:srel a)
(init:a) (len:U32.t)
:HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> alloca_pre len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init) /\
frameOf b == HS.get_tip h0))
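(*
/// Illustrative usage sketch (hypothetical name ``on_stack``; ``triv`` is the
/// trivial preorder from the sketch after ``upd`` above): a stack-allocated
/// scratch buffer between ``push_frame`` and ``pop_frame``; the fresh-frame
/// lemmas above let the whole function be specified with ``modifies loc_none``.
let on_stack (_:unit)
  :HST.Stack U32.t (requires (fun _ -> True))
                   (ensures (fun h0 _ h1 -> modifies loc_none h0 h1))
  = HST.push_frame ();
    let b = malloca #U32.t #(triv U32.t) 0ul 4ul in
    upd b 0ul 7ul;
    let x = index b 0ul in
    HST.pop_frame ();
    x
*)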
(*
* Allocate a stack buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val malloca_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.StackInline (lmbuffer a rrel rrel (U32.v len))
(requires fun h0 ->
alloca_pre len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)) /\
frameOf b == HS.get_tip h0)
/// ``alloca_of_list init`` allocates a buffer in the current stack
/// frame. The initial values of the cells of this buffer are
/// specified by the ``init`` list, which must be nonempty, and of
/// length representable as a machine integer.
unfold let alloca_of_list_pre (#a:Type0) (init:list a) =
normalize (0 < FStar.List.Tot.length init) /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val malloca_of_list (#a:Type0) (#rrel:srel a) (init: list a)
:HST.StackInline (lmbuffer a rrel rrel (normalize_term (List.Tot.length init)))
(requires (fun _ -> alloca_of_list_pre init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init) /\
frameOf b == HS.get_tip h0))
unfold let gcmalloc_of_list_pre (#a:Type0) (r:HS.rid) (init:list a) =
HST.is_eternal_region r /\
normalize (FStar.List.Tot.length init <= UInt.max_int 32)
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc_of_list (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer a rrel rrel (normalize_term (List.Tot.length init)){frameOf b == r /\ recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.seq_of_list init)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_of_list_partial (#a:Type0) (#rrel:srel a) (r:HS.rid) (init:list a)
:HST.ST (b:lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r{recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mgcmalloc_of_list_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: list a)
: HST.ST
(b: lmbuffer_or_null a rrel rrel (normalize_term (List.Tot.length init)) r {recallable b})
(requires (fun _ -> gcmalloc_of_list_pre r init))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.seq_of_list init))) | [] | LowStar.Monotonic.Buffer.mgcmalloc_of_list_partial | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> init: Prims.list a
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a
rrel
rrel
(FStar.Pervasives.normalize_term (FStar.List.Tot.Base.length init))
r {LowStar.Monotonic.Buffer.recallable b}) | {
"end_col": 28,
"end_line": 2272,
"start_col": 4,
"start_line": 2272
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0 | let malloc_pre (r: HS.rid) (len: U32.t) = | false | null | false | HST.is_eternal_region r /\ U32.v len > 0 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"Prims.l_and",
"FStar.HyperStack.ST.is_eternal_region",
"Prims.b2t",
"Prims.op_GreaterThan",
"FStar.UInt32.v",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
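(* Illustrative sketch (hypothetical, not part of this interface): `as_seq`
   and `get` are how functional specifications talk about buffer contents.
   For example, a ghost predicate stating that `b` holds `x` at index `i`:

   let holds_at (#a:Type0) (#rrel #rel:srel a)
     (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b}) (x:a)
     : GTot Type0
     = get h b i == x
*)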
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
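(* Illustrative sketch (hypothetical, not part of this interface): with the
   trivial preorder on both the parent and the sub-buffer, `compatible_sub`
   holds for any valid range, and the proof is expected to go through
   automatically once the unfold definitions reduce.

   let trivial (a:Type0) : srel a = fun _ _ -> True

   let compatible_sub_trivial (#a:Type0) (b:mbuffer a (trivial a) (trivial a))
     (i len:U32.t{U32.v i + U32.v len <= length b})
     : Lemma (compatible_sub b i len (trivial a))
     = ()
*)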
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible with the preorder of the parent buffer.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder ``rrel``).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
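(* Illustrative sketch (hypothetical client signature, not part of this
   interface): in specifications, `loc_union` lists everything a function may
   touch. `loc_buffer` and the `modifies` clause are introduced further below.

   val writes_two (#a:Type0) (#rrel #rel:srel a)
     (dst1 dst2:mbuffer a rrel rel)
     : HST.Stack unit
       (requires (fun h -> live h dst1 /\ live h dst2))
       (ensures  (fun h0 _ h1 ->
         modifies (loc_union (loc_buffer dst1) (loc_buffer dst2)) h0 h1))
*)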
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
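(* Illustrative sketch (hypothetical, not part of this interface): locations
   that commonly appear in specifications, built from the constructors above.

   let loc_of_tip (h:HS.mem) : GTot loc =
     loc_all_regions_from false (HS.get_tip h)   // the top-most stack frame and every region extending it

   let loc_of_ref (#t:Type) (#p:Preorder.preorder t) (r:HS.mreference t p) : GTot loc =
     loc_mreference r                            // a single (live) reference
*)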
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
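(* Illustrative sketch (hypothetical, not part of this interface): a union
   always includes each of its components; this is the typical way inclusion
   is used to weaken footprints (see also `modifies_loc_includes` below).

   let includes_union_left (l1 l2:loc)
     : Lemma (loc_includes (loc_union l1 l2) l1)
     = loc_includes_refl l1;
       loc_includes_union_l l1 l2 l1
*)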
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory
/// locations of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
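(* Illustrative sketch (hypothetical client signature, not part of this
   interface): the combinators above keep multi-buffer preconditions compact.
   The `modifies` clause used in the postcondition is introduced just below.

   val xor3 (#a:Type0) (#rrel #rel:srel a)
     (dst b1 b2:mbuffer a rrel rel)
     : HST.Stack unit
       (requires (fun h ->
         all_live h [buf dst; buf b1; buf b2] /\
         all_disjoint [loc_buffer dst; loc_buffer b1; loc_buffer b2]))
       (ensures  (fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1))
*)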
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
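(* Illustrative sketch (hypothetical, not part of this interface): the framing
   lemma above usually fires automatically through its patterns, e.g.

   let frame_disjoint (#a:Type0) (#rrel #rel:srel a)
     (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
     : Lemma (requires (live h0 b2 /\
                        loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                        modifies (loc_buffer b1) h0 h1))
             (ensures  (live h1 b2 /\ as_seq h0 b2 == as_seq h1 b2))
     = ()  // expected to be discharged by modifies_buffer_elim via its SMT patterns
*)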
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any set of memory locations
/// is trivially "modified" (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
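(* Illustrative sketch (hypothetical, not part of this interface): chaining
   two updates to the same buffer into a single modifies clause.

   let chain (#a:Type0) (#rrel #rel:srel a)
     (b:mbuffer a rrel rel) (h0 h1 h2:HS.mem)
     : Lemma (requires (modifies (loc_buffer b) h0 h1 /\
                        modifies (loc_buffer b) h1 h2))
             (ensures  (modifies (loc_buffer b) h0 h2))
     = modifies_trans (loc_buffer b) h0 h1 (loc_buffer b) h2;
       loc_union_idem (loc_buffer b)  // collapses the union back to loc_buffer b
*)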
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
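(* Illustrative sketch (hypothetical client code, not part of this interface):
   the usual shape of a stack-scoped computation. The elided body stands for
   assumed work that writes only to `b` and to scratch space allocated in the
   new frame; the frame-related lemmas in this file let the caller see only
   `loc_buffer b`.

   let with_frame (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
     : HST.Stack unit
       (requires (fun h -> live h b))
       (ensures  (fun h0 _ h1 -> modifies (loc_buffer b) h0 h1))
     = HST.push_frame ();
       // ... allocate scratch buffers here and do the work on b ...
       HST.pop_frame ()
*)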
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
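(* Illustrative sketch (hypothetical signature, not part of this interface):
   allocation routines typically advertise the freshness of their result with
   this shorthand. `malloc_like` is an assumed name.

   val malloc_like (#a:Type0) (any_rel:srel a) (r:HS.rid) (init:a) (len:U32.t{U32.v len > 0})
     : HST.ST (b:mbuffer a any_rel any_rel{length b == U32.v len})
       (requires (fun _ -> HST.is_eternal_region r))
       (ensures  (fun h0 b h1 ->
         live h1 b /\
         fresh_loc (loc_buffer b) h0 h1 /\
         modifies loc_none h0 h1))
*)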
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer entails from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
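(* Usage sketch (illustrative only, not part of this interface): branching on
   ``is_null`` before dereferencing a maybe-null pointer, in the LowStar.Buffer
   instantiation. The abbreviation `B` and the name `get_or_default` are
   hypothetical.

   module B = LowStar.Buffer
   open FStar.HyperStack.ST

   let get_or_default (p: B.pointer_or_null UInt32.t) (d: UInt32.t)
     : Stack UInt32.t
       (requires fun h -> B.live h p)
       (ensures  fun h0 _ h1 -> h0 == h1)
   = if B.is_null p
     then d               // NULL: fall back to the default value
     else B.index p 0ul   // non-NULL: length is 1, so index 0 is in bounds
*)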
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
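(* Usage sketch (illustrative only, not part of this interface): carving out the
   second half of an 8-element buffer with LowStar.Buffer's `sub`, the
   trivial-preorder wrapper around ``msub`` (so ``compatible_sub`` holds
   trivially). The abbreviations `B` and `G` and the name `second_half` are
   hypothetical.

   module B = LowStar.Buffer
   module G = FStar.Ghost
   open FStar.HyperStack.ST

   let second_half (b: B.buffer UInt8.t)
     : Stack (B.buffer UInt8.t)
       (requires fun h -> B.live h b /\ B.length b == 8)
       (ensures  fun h0 r h1 -> h0 == h1 /\ r == B.gsub b 4ul 4ul)
   = B.sub b 4ul (G.hide 4ul)   // extracted by KaRaMeL as b + 4
*)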
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as ``b[i]``.
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
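(* Usage sketch (illustrative only, not part of this interface): ``index`` and
   ``upd`` used together to swap the first two cells, in the LowStar.Buffer
   instantiation, where the preorder side condition of ``upd`` is trivially met.
   The abbreviation `B` and the name `swap2` are hypothetical.

   module B = LowStar.Buffer
   open FStar.HyperStack.ST

   let swap2 (b: B.buffer UInt64.t)
     : Stack unit
       (requires fun h -> B.live h b /\ B.length b >= 2)
       (ensures  fun h0 _ h1 -> B.modifies (B.loc_buffer b) h0 h1)
   = let x = B.index b 0ul in   // read b[0]
     let y = B.index b 1ul in   // read b[1]
     B.upd b 0ul y;             // b[0] = y
     B.upd b 1ul x              // b[1] = x
*)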
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
 * Clients can witness predicates on the contents of the buffer, and later recall them,
 * provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
* Note the tight patterns on the quantifier, you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
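(* Usage sketch (illustrative only, not part of this interface): witnessing a
   property that is stable under the buffer preorder. The preorder `grows`, the
   predicate `at_least` and the name `remember` are made up for this sketch;
   additional asserts may be needed to trigger the quantifiers, as noted above.

   let grows : srel UInt32.t =
     fun s1 s2 ->
       Seq.length s1 == Seq.length s2 /\
       (Seq.length s1 > 0 ==>
        UInt32.v (Seq.index s1 0) <= UInt32.v (Seq.index s2 0))

   let at_least (n:nat) : spred UInt32.t =
     fun s -> Seq.length s > 0 /\ UInt32.v (Seq.index s 0) >= n

   // `at_least n` is stable under `grows`, so it can be witnessed once and
   // recalled later without re-proving it from the current heap.
   let remember (b:mbuffer UInt32.t grows grows)
     : HST.ST unit
       (requires fun h -> live h b /\ length b > 0 /\ UInt32.v (get h b 0) >= 1)
       (ensures  fun h0 _ h1 -> h0 == h1 /\ b `witnessed` (at_least 1))
   = witness_p b (at_least 1)
*)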
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be deallocated with ``free``.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
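(* Usage sketch (illustrative only, not part of this interface): a heap
   allocation that is later freed. It uses LowStar.Buffer's `malloc`, the
   trivial-preorder wrapper around the ``mmalloc`` declared below; the
   abbreviations `B` and `HS` and the name `scratch` are hypothetical.

   module B = LowStar.Buffer
   module HS = FStar.HyperStack
   open FStar.HyperStack.ST

   let scratch ()
     : ST unit (requires fun _ -> True) (ensures fun _ _ _ -> True)
   = let b = B.malloc HS.root 0uy 8ul in   // freeable, lives in the root region
     B.upd b 0ul 1uy;
     B.free b                              // allowed because `freeable b` holds
*)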
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
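(* Usage sketch (illustrative only, not part of this interface): the intended
   null-check pattern for the partial allocators. The preorder `triv`, the
   name `demo` and the choice of element type are made up for this sketch;
   `mgcmalloc_partial` is one of the partial allocators declared below.

   let triv : srel UInt32.t = fun _ _ -> True

   let demo (r:HS.rid)
     : HST.ST unit
       (requires fun _ -> malloc_pre r 4ul)
       (ensures  fun _ _ _ -> True)
   = let b = mgcmalloc_partial #UInt32.t #triv r 0ul 4ul in
     if is_null b
     then ()              // allocation failed: the caller must handle NULL
     else upd b 0ul 1ul   // success: b is live and has length 4
*)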
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s) | false | true | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val malloc_pre : r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t -> Prims.logical | [] | LowStar.Monotonic.Buffer.malloc_pre | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> len: FStar.UInt32.t -> Prims.logical | {
"end_col": 87,
"end_line": 2110,
"start_col": 47,
"start_line": 2110
} |
|
FStar.HyperStack.ST.ST | val mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len | val mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
let mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) = | true | null | false | mgcmalloc r init len | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mgcmalloc",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
    (rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting sequence are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
   the `FStar.Classical.forall_intro_n` family and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, the clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their corresponding
/// sets of memory locations are related by inclusion as well.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory locations
/// are related by inclusion as well.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then their corresponding sets of memory locations are related by inclusion as well
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
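(* Usage sketch (illustrative only, not part of this interface): a typical
   precondition built from these helpers, stating that three buffers are live
   and pairwise disjoint. The abbreviation `B` (LowStar.Buffer, which
   re-exports this module) and the name `inv3` are hypothetical.

   module B = LowStar.Buffer

   let inv3 (#a:Type0) (b1 b2 b3: B.buffer a) (h: HS.mem) : Type0 =
     B.live h b1 /\ B.live h b2 /\ B.live h b3 /\
     B.loc_pairwise_disjoint
       [B.loc_buffer b1; B.loc_buffer b2; B.loc_buffer b3]
*)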
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
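(* Usage sketch (illustrative only, not part of this interface): the framing
   principle these elimination lemmas encode: an update through `b1` cannot
   change a disjoint `b2`. It assumes the LowStar.Buffer instantiation; the
   abbreviation `B` and the name `frame_demo` are hypothetical.

   module B = LowStar.Buffer
   open FStar.HyperStack.ST

   let frame_demo (b1 b2: B.buffer UInt32.t)
     : Stack unit
       (requires fun h -> B.live h b1 /\ B.live h b2 /\
                        B.length b1 > 0 /\ B.disjoint b1 b2)
       (ensures  fun h0 _ h1 -> B.live h1 b2 /\
                              B.as_seq h1 b2 == B.as_seq h0 b2)
   = B.upd b1 0ul 42ul   // modifies (loc_buffer b1), which is disjoint from b2
*)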
/// If the memory state does not change, then any set of memory locations is
/// trivially modified (in particular, the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
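(* Illustrative sketch (hypothetical): widening a modifies clause from a single
   buffer to a union, using loc_includes_union_l (declared earlier in this
   interface) together with modifies_loc_includes.
let widen_example (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma
    (requires (modifies (loc_buffer b1) h0 h1))
    (ensures (modifies (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1))
  = loc_includes_union_l (loc_buffer b1) (loc_buffer b2) (loc_buffer b1);
    modifies_loc_includes (loc_union (loc_buffer b1) (loc_buffer b2)) h0 h1 (loc_buffer b1)
*)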
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
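(* Illustrative sketch (hypothetical): chaining two steps that each modify the
   same buffer; loc_includes_refl makes [loc_buffer b] include itself, so
   modifies_trans_linear closes the composite claim.
let compose_example (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h1 h2 h3:HS.mem)
  : Lemma
    (requires (modifies (loc_buffer b) h1 h2 /\ modifies (loc_buffer b) h2 h3))
    (ensures (modifies (loc_buffer b) h1 h3))
  = modifies_trans_linear (loc_buffer b) (loc_buffer b) h1 h2 h3
*)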
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a just-allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
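(* Illustrative sketch (hypothetical): a freshly allocated buffer is a fresh
   location, since unused_in_loc_unused_in and live_loc_not_unused_in (above)
   fire on the usual allocation postconditions.
let fresh_after_alloc (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h0 h1:HS.mem)
  : Lemma
    (requires (b `unused_in` h0 /\ live h1 b))
    (ensures (fresh_loc (loc_addr_of_buffer b) h0 h1))
  = ()
*)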
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
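(* Illustrative sketch (hypothetical): reading through a pointer, i.e. a buffer
   of length 1, with the stateful [index] below; [deref h p] is the matching
   ghost view of its contents.
let read_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
  : HST.Stack a
    (requires (fun h -> live h p))
    (ensures (fun h0 x h1 -> h0 == h1 /\ x == deref h0 p))
  = index p 0ul
*)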
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``.)
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
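(* Illustrative sketch (hypothetical helper, not part of this interface): a
   read-modify-write swapping the first two cells of a buffer. The preorder
   [triv] is assumed here only so that the rel-compatibility precondition of
   [upd] is trivially satisfied.
let triv (a:Type0) : srel a = fun _ _ -> True
let swap01 (#a:Type0) (b:mbuffer a (triv a) (triv a))
  : HST.Stack unit
    (requires (fun h -> live h b /\ 2 <= length b))
    (ensures (fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1))
  = let x = index b 0ul in
    let y = index b 1ul in
    upd b 0ul y;
    upd b 1ul x
*)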
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
 * Clients can witness predicates on the contents of the buffer, and later recall them,
 * provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
 * if you are working with them directly
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
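(* Illustrative sketch (hypothetical): witnessing a stable predicate. With a
   preorder that freezes the contents, any predicate over the contents is
   stable, so it can be witnessed once and recalled later.
let frozen (a:Type0) : srel a = fun s1 s2 -> s1 == s2
let remember (#a:Type0) (s:Seq.seq a) (b:mbuffer a (frozen a) (frozen a))
  : HST.ST unit
    (requires (fun h -> recallable b /\ as_seq h b == s))
    (ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` (fun s' -> s' == s)))
  = witness_p b (fun s' -> s' == s)
*)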
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
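(* Illustrative sketch (hypothetical): a heap-allocated scratch buffer freed
   before returning. It relies on [mmalloc] (declared further down in this
   interface) to obtain a [freeable] buffer; the postcondition is left trivial
   to keep the sketch short.
let with_scratch (#rrel:srel FStar.UInt8.t) (_:unit)
  : HST.ST unit
    (requires (fun _ -> True))
    (ensures (fun _ _ _ -> True))
  = let b = mmalloc #FStar.UInt8.t #rrel HS.root 0uy 16ul in
    free b
*)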
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which tells that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
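(* Illustrative sketch (hypothetical): allocating an eternal, recallable buffer
   of eight zero bytes in the root region; the refinement on the result records
   its length, while alloc_post_mem_common gives its initial contents.
let alloc_example (#rrel:srel FStar.UInt8.t) (_:unit)
  : HST.ST (b:lmbuffer FStar.UInt8.t rrel rrel 8{recallable b})
    (requires (fun _ -> True))
    (ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create 8 0uy)))
  = mgcmalloc HS.root 0uy 8ul
*)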
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mgcmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [] | LowStar.Monotonic.Buffer.mgcmalloc_partial | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a rrel rrel (FStar.UInt32.v len) r
{LowStar.Monotonic.Buffer.recallable b}) | {
"end_col": 24,
"end_line": 2152,
"start_col": 4,
"start_line": 2152
} |
FStar.Pervasives.Lemma | val live_is_null (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (live h b)) [SMTPat (live h b)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h | val live_is_null (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (live h b)) [SMTPat (live h b)]
let live_is_null (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (live h b)) [SMTPat (live h b)] = | false | null | true | null_unique b;
live_null a rrel rel h | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.live_null",
"Prims.unit",
"LowStar.Monotonic.Buffer.null_unique",
"Prims.eq2",
"Prims.bool",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.squash",
"LowStar.Monotonic.Buffer.live",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_is_null (#a: Type0) (#rrel #rel: srel a) (h: HS.mem) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (live h b)) [SMTPat (live h b)] | [] | LowStar.Monotonic.Buffer.live_is_null | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.g_is_null b == true)
(ensures LowStar.Monotonic.Buffer.live h b)
[SMTPat (LowStar.Monotonic.Buffer.live h b)] | {
"end_col": 26,
"end_line": 124,
"start_col": 4,
"start_line": 123
} |
FStar.Pervasives.Lemma | val length_null_2 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) [SMTPat (g_is_null b)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b | val length_null_2 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) [SMTPat (g_is_null b)]
let length_null_2 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) [SMTPat (g_is_null b)] = | false | null | true | len_null a rrel rel;
null_unique b | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.null_unique",
"Prims.unit",
"LowStar.Monotonic.Buffer.len_null",
"Prims.eq2",
"Prims.bool",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.squash",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val length_null_2 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (g_is_null b == true)) (ensures (length b == 0)) [SMTPat (g_is_null b)] | [] | LowStar.Monotonic.Buffer.length_null_2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.g_is_null b == true)
(ensures LowStar.Monotonic.Buffer.length b == 0)
[SMTPat (LowStar.Monotonic.Buffer.g_is_null b)] | {
"end_col": 17,
"end_line": 223,
"start_col": 4,
"start_line": 222
} |
FStar.HyperStack.ST.ST | val mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mmalloc r init len | val mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
let mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) = | true | null | false | mmalloc r init len | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.mmalloc",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.Monotonic.Buffer.lmbuffer_or_null",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.malloc_pre",
"LowStar.Monotonic.Buffer.alloc_partial_post_mem_common",
"FStar.Seq.Base.create"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
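(* Added illustrative sketch (hypothetical helper, not in the original file):
   `get` is a pointwise view of `as_seq`, so the following holds by
   definition and specs may switch freely between the two views:
     let get_as_seq_example (#a:Type0) (#rrel #rel:srel a)
         (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
       : Lemma (get h b i == Seq.index (as_seq h b) i)
       = ()
*)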
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
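(* Added illustrative note, not in the original file: in the common case of
   plain buffers, where both the parent preorder and the sub-buffer preorder
   are trivial, e.g.
     let trivial_rel (a:Type0) : srel a = fun _ _ -> True
   `compatible_sub b i len (trivial_rel a)` holds for any in-bounds `i` and
   `len`, since both implications in `compatible_subseq_preorder` then have
   trivially true conclusions. *)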
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Furthermore, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
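(* Added illustrative instance, not in the original file: for a buffer `b`
   with `length b >= 12`, `gsub_gsub` gives
     mgsub sub_rel (mgsub sub_rel b 2ul 10ul) 3ul 4ul == mgsub sub_rel b 5ul 4ul
   i.e. nested carving collapses by adding the offsets (2 + 3 = 5) while
   keeping the innermost length (4). *)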
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
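(* Added illustrative sketch, not in the original file: a client typically
   assembles its footprint by unioning the locations it touches, e.g.
     let fp = loc_union (loc_buffer b1) (loc_buffer b2)
   for some buffers `b1` and `b2`; the unit, commutativity, associativity and
   idempotence lemmas above let the SMT solver normalize such unions without
   manual rewriting. *)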
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
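(* Added illustrative instance, not in the original file: for a buffer `b`
   with `length b >= 6`, `loc_buffer_from_to_eq` gives
     loc_buffer_from_to b 2ul 6ul == loc_buffer (mgsub rel b 2ul 4ul)
   i.e. the footprint of the cells [2, 6) of `b` is exactly the footprint of
   the corresponding sub-buffer of length 6 - 2 = 4. *)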
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
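(* Added illustrative note, not in the original file: `loc_buffer b` covers
   only the cells of `b`, whereas `loc_addr_of_buffer b` covers its whole
   allocation unit and is liveness-sensitive (preserve_liveness = false).
   For instance,
     loc_includes (loc_addr_of_buffer b) (loc_buffer b)
   follows from `loc_includes_addresses_buffer` below, while the converse
   does not hold in general. *)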
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
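(* Added illustrative sketch, not in the original file: inclusion lets a
   caller establish one large footprint and derive the smaller ones; for any
   `l1 l2 : loc`, both
     loc_includes (loc_union l1 l2) l1
     loc_includes (loc_union l1 l2) loc_none
   follow from `loc_includes_refl`, `loc_includes_union_l` and
   `loc_includes_none` above. *)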
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so are their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so are their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so are their corresponding memory locations
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
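(* Added illustrative note, not in the original file: for three hypothetical
   buffers `b1`, `b2` and `b3`, the precondition
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   reduces at typechecking time to the three pairwise conjuncts
   `loc_disjoint (loc_buffer b1) (loc_buffer b2)`,
   `loc_disjoint (loc_buffer b1) (loc_buffer b3)` and
   `loc_disjoint (loc_buffer b2) (loc_buffer b3)`. *)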
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
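(* Added illustrative sketch (hypothetical lemma, not in the original file):
   the elimination lemmas above yield the usual framing principle, e.g.
     let frame_example (#a:Type0) (#rrel #rel:srel a)
         (b1 b2:mbuffer a rrel rel) (h0 h1:HS.mem)
       : Lemma
         (requires (live h0 b1 /\
                    loc_disjoint (loc_buffer b1) (loc_buffer b2) /\
                    modifies (loc_buffer b2) h0 h1))
         (ensures  (live h1 b1 /\ as_seq h1 b1 == as_seq h0 b1))
       = ()
   is expected to be discharged automatically by the `modifies_buffer_elim`
   patterns. *)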
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
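(* Added illustrative note, not in the original file: the linear variant is
   the one that typically fires when sequencing two stateful calls; from
     modifies (loc_buffer b) h0 h1   and   modifies (loc_buffer b) h1 h2
   it yields `modifies (loc_buffer b) h0 h2` directly, taking
   l = l_goal = loc_buffer b, since every location includes itself. *)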
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find that you have to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live, or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a buffer just allocated is disjoint from
any other object, however the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1)
val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
val unused_in_loc_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (unused_in b h))
(ensures (loc_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (unused_in b h)]
val modifies_address_liveness_insensitive_unused_in
(h h' : HS.mem)
: Lemma
(requires (modifies (address_liveness_insensitive_locs) h h'))
(ensures (loc_not_unused_in h' `loc_includes` loc_not_unused_in h /\ loc_unused_in h `loc_includes` loc_unused_in h'))
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_not_unused_in
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (modifies (loc_union (loc_unused_in h) l) h h'))
(ensures (modifies l h h'))
val mreference_live_loc_not_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (h `HS.contains` r))
(ensures (loc_not_unused_in h `loc_includes` loc_freed_mreference r /\ loc_not_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.contains h r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_not_unused_in h `loc_includes` loc_freed_mreference r)];
]]
val mreference_unused_in_loc_unused_in
(#t: Type)
(#pre: Preorder.preorder t)
(h: HS.mem)
(r: HS.mreference t pre)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (loc_unused_in h `loc_includes` loc_freed_mreference r /\ loc_unused_in h `loc_includes` loc_mreference r))
[SMTPatOr [
[SMTPat (HS.unused_in r h)];
[SMTPat (loc_unused_in h `loc_includes` loc_mreference r)];
[SMTPat (loc_unused_in h `loc_includes` loc_freed_mreference r)];
]]
let unused_in_not_unused_in_disjoint_2
(l1 l2 l1' l2': loc)
(h: HS.mem)
: Lemma
(requires (loc_unused_in h `loc_includes` l1 /\ loc_not_unused_in h `loc_includes` l2 /\ l1 `loc_includes` l1' /\ l2 `loc_includes` l2' ))
(ensures (loc_disjoint l1' l2' ))
[SMTPat (loc_disjoint l1' l2'); SMTPat (loc_unused_in h `loc_includes` l1); SMTPat (loc_not_unused_in h `loc_includes` l2)]
= loc_includes_trans (loc_unused_in h) l1 l1' ;
loc_includes_trans (loc_not_unused_in h) l2 l2' ;
loc_unused_in_not_unused_in_disjoint h ;
loc_disjoint_includes (loc_unused_in h) (loc_not_unused_in h) l1' l2'
val modifies_loc_unused_in
(l: loc)
(h1 h2: HS.mem)
(l' : loc)
: Lemma
(requires (
modifies l h1 h2 /\
address_liveness_insensitive_locs `loc_includes` l /\
loc_unused_in h2 `loc_includes` l'
))
(ensures (loc_unused_in h1 `loc_includes` l'))
[SMTPatOr [
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h2 `loc_includes` l')];
[SMTPat (modifies l h1 h2); SMTPat (loc_unused_in h1 `loc_includes` l')];
]]
/// Shorthand: freshness
let fresh_loc (l: loc) (h h' : HS.mem) : GTot Type0 =
loc_unused_in h `loc_includes` l /\
loc_not_unused_in h' `loc_includes` l
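(*
 * Illustrative sketch, not part of this interface (the buffer names below
 * are hypothetical): a freshly allocated buffer b satisfies
 * fresh_loc (loc_addr_of_buffer b) h0 h1, i.e. its address is unused in h0
 * and not unused in h1.  For any buffer b' live in h0,
 * live_loc_not_unused_in gives the matching inclusion on the other side,
 * and the SMT pattern of unused_in_not_unused_in_disjoint_2 below yields
 *   loc_disjoint (loc_addr_of_buffer b) (loc_addr_of_buffer b')
 * which is how clients conclude that fresh buffers are disjoint from
 * everything allocated before.
 *)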
let ralloc_post_fresh_loc (#a:Type) (#rel:Preorder.preorder a) (i: HS.rid) (init:a) (m0: HS.mem)
(x: HST.mreference a rel{HST.is_eternal_region (HS.frameOf x)}) (m1: HS.mem) : Lemma
(requires (HST.ralloc_post i init m0 x m1))
(ensures (fresh_loc (loc_freed_mreference x) m0 m1))
[SMTPat (HST.ralloc_post i init m0 x m1)]
= ()
//AR: this is needed for liveness across fresh_frame
val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
val popped_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.popped h0 h1))
(ensures (modifies (loc_region_only false (HS.get_tip h0)) h0 h1))
[SMTPat (HS.popped h0 h1)]
val modifies_remove_new_locs (l_fresh l_aux l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (fresh_loc l_fresh h1 h2 /\
modifies l_aux h1 h2 /\
l_goal `loc_includes` l_aux /\
modifies (loc_union l_fresh l_goal) h2 h3))
(ensures (modifies l_goal h1 h3))
[SMTPat (fresh_loc l_fresh h1 h2);
SMTPat (modifies l_aux h1 h2);
SMTPat (modifies l_goal h1 h3)]
(*
* AR: this lemma is framing the modifies clause across a fresh frame
* one way to do it would have been to reuse the lemma modifies_remove_new_locs,
* treating the fresh frame as another new location
 * however, the way the library is set up, loc_region in any form cannot be considered
* a fresh loc
* so, we have a special lemma for fresh_frame
*)
let modifies_remove_fresh_frame (h1 h2 h3:HS.mem) (l:loc)
: Lemma (requires (HS.fresh_frame h1 h2 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3))
(ensures (modifies l h1 h3))
[SMTPat (modifies l h1 h3); SMTPat (HS.fresh_frame h1 h2)]
= loc_regions_unused_in h1 (HS.mod_set (Set.singleton (HS.get_tip h2)));
modifies_only_not_unused_in l h1 h3
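(*
 * Illustrative sketch (hypothetical client code): this lemma is what makes
 * the usual stack bracket compose with modifies reasoning:
 *
 *   push_frame ();      // HS.fresh_frame h1 h2
 *   ... allocate and write only in the new frame, or within l ...
 *   // have: modifies (loc_union (loc_all_regions_from false (HS.get_tip h2)) l) h2 h3
 *   // the SMT pattern above then shrinks this to: modifies l h1 h3
 *)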
/// Legacy shorthands for disjointness and inclusion of buffers
///
let disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_disjoint (loc_buffer b1) (loc_buffer b2)
let includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) :GTot Type0 =
loc_includes (loc_buffer b1) (loc_buffer b2) /\
(g_is_null b1 <==> g_is_null b2)
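(*
 * Illustrative sketch, not part of this interface (the lemma name below is
 * hypothetical): since `disjoint` is a mere abbreviation, it unfolds to the
 * loc_disjoint fact expected by the framing lemmas:
 *
 *   let disjoint_elim (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
 *     (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
 *     : Lemma (requires (disjoint b1 b2))
 *             (ensures  (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
 *     = ()   // holds by unfolding the definition of disjoint
 *)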
val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
(*
/// The liveness of a sub-buffer follows from the liveness
/// of its enclosing buffer.
val includes_live (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (live h larger ==> live h smaller))
[SMTPatOr [
[SMTPat (includes larger smaller); SMTPat (live h larger)];
[SMTPat (includes larger smaller); SMTPat (live h smaller)];
]]
*)
val includes_frameOf_as_addr (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (g_is_null larger == g_is_null smaller /\ frameOf larger == frameOf smaller /\ as_addr larger == as_addr smaller))
[SMTPat (larger `includes` smaller)]
///
/// Useful shorthands for pointers, or maybe-null pointers
inline_for_extraction
type mpointer (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{length b == 1}
inline_for_extraction
type mpointer_or_null (a:Type0) (rrel:srel a) (rel:srel a) =
b:mbuffer a rrel rel{if g_is_null b then True else length b == 1}
unfold
let deref (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (x:mpointer a rrel rel) =
get h x 0
/// Two pointers having different contents are disjoint. This is valid
/// only for pointers, i.e. buffers of size 1.
val pointer_distinct_sel_disjoint
(#a:Type0) (#rrel1 #rrel2 #rel1 #rel2:srel a)
(b1:mpointer a rrel1 rel1)
(b2:mpointer a rrel2 rel2)
(h:HS.mem)
:Lemma (requires (live h b1 /\ live h b2 /\ get h b1 0 =!= get h b2 0))
(ensures (disjoint b1 b2))
/// The following stateful operations on buffers do not change the
/// memory, but, as required by the C standard, they all require the
/// buffer in question to be live.
/// The nullity test, ``is_null b``, which KaRaMeL compiles to C as ``b == NULL``.
val is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack bool (requires (fun h -> live h b))
(ensures (fun h y h' -> h == h' /\ y == g_is_null b))
/// ``sub b i len`` constructs the sub-buffer of ``b`` starting from
/// offset ``i`` with length ``len``. KaRaMeL extracts this operation as
/// ``b + i`` (or, equivalently, ``&b[i]``).
val msub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t) (len:Ghost.erased U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i + U32.v (Ghost.reveal len) <= length b /\ compatible_sub b i (Ghost.reveal len) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (Ghost.reveal len)))
/// ``offset b i`` constructs the tail of the buffer ``b`` starting from
/// offset ``i``, i.e. the sub-buffer of ``b`` starting from offset ``i``
/// with length ``U32.sub (len b) i``. KaRaMeL compiles it as ``b + i`` or
/// ``&b[i]``.
///
/// This stateful operation cannot be derived from ``sub``, because the
/// length cannot be computed outside of proofs.
val moffset (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a) (b:mbuffer a rrel rel)
(i:U32.t)
:HST.Stack (mbuffer a rrel sub_rel)
(requires (fun h -> U32.v i <= length b /\ compatible_sub b i (U32.sub (len b) i) sub_rel /\ live h b))
(ensures (fun h y h' -> h == h' /\ y == mgsub sub_rel b i (U32.sub (len b) i)))
// goffset
/// ``index b i`` reads the value of ``b`` at offset ``i`` from memory and
/// returns it. KaRaMeL compiles it as b[i].
val index (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (i:U32.t)
:HST.Stack a (requires (fun h -> live h b /\ U32.v i < length b))
(ensures (fun h y h' -> h == h' /\ y == Seq.index (as_seq h b) (U32.v i)))
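(*
 * Illustrative sketch (hypothetical client code): reading the single cell
 * of a pointer, i.e. a length-1 buffer, is just `index` at offset 0ul, and
 * its spec-level value is `deref`:
 *
 *   let read_ptr (#a:Type0) (#rrel #rel:srel a) (p:mpointer a rrel rel)
 *     : HST.Stack a (requires (fun h -> live h p))
 *                   (ensures  (fun h v h' -> h == h' /\ v == deref h p))
 *     = index p 0ul
 *)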
/// The following stateful operations on buffers modify the memory,
/// and, as usual, require the liveness of the buffer.
/// ``g_upd_seq b s h`` updates the entire buffer `b`'s contents in
/// heap `h` to correspond to the sequence `s`
val g_upd_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (s:Seq.lseq a (length b))
(h:HS.mem{live h b})
:GTot HS.mem
val lemma_g_upd_with_same_seq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b)) (ensures (g_upd_seq b (as_seq h b) h == h))
/// A lemma specifying `g_upd_seq` in terms of its effect on the
/// buffer's underlying sequence
val g_upd_seq_as_seq (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(s:Seq.lseq a (length b))
(h:HS.mem{live h b})
: Lemma (let h' = g_upd_seq b s h in
(Seq.length s > 0 ==> not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
HST.equal_domains h h' /\
as_seq h' b == s)
/// ``g_upd b i v h`` updates the buffer `b` in heap `h` at location
/// `i` writing ``v`` there. This is the spec analog of the stateful
/// update `upd` below.
let g_upd (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: GTot HS.mem
= g_upd_seq b (Seq.upd (as_seq h b) i v) h
val g_upd_modifies_strong (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:nat{i < length b})
(v:a)
(h:HS.mem{live h b})
: Lemma (modifies (loc_buffer_from_to b (U32.uint_to_t i) (U32.uint_to_t (i + 1))) h (g_upd b i v h))
/// ``upd b i v`` writes ``v`` to the memory, at offset ``i`` of
/// buffer ``b``. KaRaMeL compiles it as ``b[i] = v``.
val upd'
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
:HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> h' == g_upd b (U32.v i) v h))
inline_for_extraction
let upd
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i:U32.t)
(v:a)
: HST.Stack unit (requires (fun h -> live h b /\ U32.v i < length b /\
rel (as_seq h b) (Seq.upd (as_seq h b) (U32.v i) v)))
(ensures (fun h _ h' -> (not (g_is_null b)) /\
modifies (loc_buffer b) h h' /\
live h' b /\
as_seq h' b == Seq.upd (as_seq h b) (U32.v i) v))
= let h = HST.get () in
upd' b i v;
g_upd_seq_as_seq b (Seq.upd (as_seq h b) (U32.v i) v) h
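(*
 * Illustrative sketch (hypothetical client code, assuming the trivial
 * preorder `fun _ _ -> True`, e.g. LowStar.Buffer.trivial_preorder, here
 * abbreviated `triv`): with a trivial preorder the `rel ...` hypothesis of
 * `upd` is vacuous, so a write only needs liveness and a bounds check:
 *
 *   let zero_first_cell (b:mbuffer UInt8.t triv triv)
 *     : HST.Stack unit
 *       (requires (fun h -> live h b /\ 0 < length b))
 *       (ensures  (fun h _ h' -> live h' b /\
 *                                as_seq h' b == Seq.upd (as_seq h b) 0 0uy))
 *     = upd b 0ul 0uy
 *)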
(* FIXME: Comment on `recall` *)
val recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val region_lifetime_buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Type0
(*
* A functoriality lemma
*)
unfold
let rrel_rel_always_compatible (#a:Type0) (rrel rel:srel a) =
forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_subseq_preorder len rrel i j rel
val region_lifetime_sub (#a:Type0) (#rrel #rel #subrel:srel a)
(b0:mbuffer a rrel rel)
(b1:mbuffer a rrel subrel)
: Lemma
(requires rrel_rel_always_compatible rrel subrel)
(ensures
(region_lifetime_buf b0 /\
(exists i len. U32.v i + U32.v len <= length b0 /\ b1 == mgsub subrel b0 i len)) ==> region_lifetime_buf b1)
val recallable_null (#a:Type0) (#rrel #rel:srel a)
:Lemma (recallable (mnull #a #rrel #rel)) [SMTPat (recallable (mnull #a #rrel #rel))]
(*
val recallable_includes (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(larger:mbuffer a1 rrel1 rel1) (smaller:mbuffer a2 rrel2 rel2)
:Lemma (requires (larger `includes` smaller))
(ensures (recallable larger <==> recallable smaller))
[SMTPatOr [
[SMTPat (recallable larger); SMTPat (recallable smaller);];
[SMTPat (larger `includes` smaller)];
]]
*)
val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
(*
* Begin: API for general witness and recall
* Clients can witness predicates on the contents of the buffer, and later recall them
* Provided the predicates are stable w.r.t. the buffer preorder
*)
(* Shorthand for predicates of Seq.seq a *)
unfold let spred (a:Type0) = Seq.seq a -> Type0
(*
 * Note the tight patterns on the quantifier; you may need to write additional triggers
* if you are directly working with them
*)
unfold let stable_on (#a:Type0) (p:spred a) (rel:srel a) =
forall (s1 s2:Seq.seq a).{:pattern (p s1); (rel s1 s2); (p s2)} (p s1 /\ rel s1 s2) ==> p s2
(* Clients get this pure token when they witness a predicate *)
val witnessed (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a) :Type0
(*
* We can only support witness and recall for gc-malloced buffers (i.e. recallable ones)
* This is not a fundamental limitation, but needs some tweaks to the underlying state model
*)
val witness_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> p (as_seq h0 b) /\ p `stable_on` rel))
(ensures (fun h0 _ h1 -> h0 == h1 /\ b `witnessed` p))
val recall_p (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.ST unit (requires (fun h0 -> (recallable b \/ live h0 b) /\ b `witnessed` p))
(ensures (fun h0 _ h1 -> h0 == h1 /\ live h0 b /\ p (as_seq h0 b)))
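(*
 * Illustrative sketch of the witness/recall idiom (hypothetical client
 * code): a predicate that is stable w.r.t. `rel` can be witnessed once and
 * recalled much later, provided the buffer is recallable or still live.
 * For example, for a buffer whose preorder forces its contents to remain
 * equal to some sequence s:
 *
 *   witness_p b (fun contents -> contents == s);   // requires stability w.r.t. rel
 *   ... arbitrary code, possibly not mentioning b ...
 *   recall_p b (fun contents -> contents == s)     // recovers liveness and the predicate
 *)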
val witnessed_functorial (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: Lemma
(requires
rrel_rel_always_compatible rrel rel1 /\ //e.g. trivial_preorder, immutable preorder etc.
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\ //the underlying allocation unit for b1 and b2 must be the same
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures witnessed b2 s2)
(*
* A stateful version that relaxes the rrel and rel compatibility
* but requires liveness of b1
*)
val witnessed_functorial_st (#a:Type0)
(#rrel #rel1 #rel2:srel a)
(b1:mbuffer a rrel rel1) (b2:mbuffer a rrel rel2) (i len:U32.t)
(s1 s2:spred a)
: HST.Stack unit
(requires fun h ->
live h b1 /\
U32.v i + U32.v len <= length b1 /\
b2 == mgsub rel2 b1 i len /\
witnessed b1 s1 /\
(forall h. s1 (as_seq h b1) ==> s2 (as_seq h b2)))
(ensures fun h0 _ h1 -> h0 == h1 /\ witnessed b2 s2)
(* End: API for general witness and recall *)
/// Deallocation. A buffer that was allocated by ``malloc`` (see below)
/// can be ``free``d.
val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
val free (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.ST unit (requires (fun h0 -> live h0 b /\ freeable b))
(ensures (fun h0 _ h1 -> (not (g_is_null b)) /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies (loc_addr_of_buffer b) h0 h1 /\
HS.live_region h1 (frameOf b)))
val freeable_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (freeable b)) (ensures (length b > 0))
[SMTPat (freeable b)]
val freeable_disjoint (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (frameOf b1 <> frameOf b2 \/ as_addr b1 <> as_addr b2))
let freeable_disjoint' (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (freeable b1 /\ length b2 > 0 /\ disjoint b1 b2))
(ensures (loc_disjoint (loc_addr_of_buffer b1) (loc_addr_of_buffer b2)))
[SMTPat (freeable b1); SMTPat (disjoint b1 b2)]
= freeable_disjoint b1 b2
(***** Begin allocation functions *****)
/// Allocation. This is the common postcondition of all allocation
/// operators, which states that the resulting buffer is fresh, and
/// specifies its initial contents.
(*
* Allocation functions:
* In the return type, we try to give heap-independent postconditions (such as length)
* in the refinement of the buffer type (for the usage pattern of top-level buffers)
* while heap dependent postconditions are provided in the ensures clause
*
* One unsatisfying aspect is that these functions are duplicated in the wrappers that we write
* (e.g. Buffer, ImmutableBuffer, etc.)
* If we don't duplicate, then the clients may face type inference issues (for preorders)
*
* So, if you change any of the pre- or postcondition, you should change the pre and post spec functions
* (such as alloc_post_mem_common etc.), rather than the specs directly
* Perhaps we can rely on F* type inference and not write specs explicitly in those wrappers?
* Will try that
*
* For memory dependent post, alloc_post_mem_common is the one used by everyone
*
* For heap allocations, the library also provides partial functions that could return null
* Clients need to explicitly check for non-null values when using these functions
* Partial function specs use alloc_partial_post_mem_common
*
* NOTE: a useful test for the implementation of partial functions is that
* their spec should be valid even when their implementation just returns null
*)
unfold let lmbuffer (a:Type0) (rrel rel:srel a) (len:nat)
= b:mbuffer a rrel rel{length b == len /\ not (g_is_null b)}
unfold
let alloc_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= live h1 b /\
unused_in b h0 /\
Map.domain (HS.get_hmap h1) `Set.equal` Map.domain (HS.get_hmap h0) /\
(HS.get_tip h1) == (HS.get_tip h0) /\
modifies loc_none h0 h1 /\
as_seq h1 b == s
(* Return type and post for partial allocation functions *)
unfold let lmbuffer_or_null (a:Type0) (rrel rel:srel a) (len:nat) (r:HS.rid)
= b:mbuffer a rrel rel{(not (g_is_null b)) ==> (length b == len /\ frameOf b == r)}
unfold let alloc_partial_post_mem_common (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h0 h1:HS.mem) (s:Seq.seq a)
= (g_is_null b /\ h0 == h1) \/
((not (g_is_null b)) /\ alloc_post_mem_common b h0 h1 s)
unfold let malloc_pre (r:HS.rid) (len:U32.t) = HST.is_eternal_region r /\ U32.v len > 0
/// ``gcmalloc r init len`` allocates a memory-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer cannot be
/// freed. In fact, it is eternal: it cannot be deallocated at all.
(*
* See the Allocation comment above when changing the spec
*)
val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
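(*
 * Illustrative sketch (hypothetical client code, assuming the trivial
 * preorder from LowStar.Buffer): allocating a memory-managed buffer of
 * eight zero bytes in an eternal region r:
 *
 *   let b = mgcmalloc #UInt8.t #(trivial_preorder UInt8.t) r 0uy 8ul in
 *   // here: length b == 8, frameOf b == r, recallable b, and in the new
 *   // heap h1: as_seq h1 b == Seq.create 8 0uy
 *)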
(*
* Allocate a memory-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mgcmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mgcmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
= mgcmalloc r init len
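(*
 * Illustrative sketch (hypothetical client code): the partial allocator may
 * return null, so callers are expected to test the result with `is_null`
 * before using it:
 *
 *   let b = mgcmalloc_partial r 0uy len in
 *   if is_null b
 *   then ...   // allocation "failed": b is null and the heap is unchanged
 *   else ...   // here length b == U32.v len and frameOf b == r
 *)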
/// ``malloc r init len`` allocates a hand-managed buffer of some
/// positive length ``len`` in an eternal region ``r``. Every cell of this
/// buffer will have initial contents ``init``. Such a buffer can be
/// freed using ``free`` above. Note that the ``freeable`` permission is
/// only on the whole buffer ``b``, and is not inherited by any of its
/// strict sub-buffers.
(*
* See the Allocation comment above when changing the spec
*)
val mmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
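(*
 * Illustrative sketch (hypothetical client code, assuming the trivial
 * preorder from LowStar.Buffer): unlike mgcmalloc, a buffer returned by
 * mmalloc carries the `freeable` permission and can later be released with
 * `free` (declared above):
 *
 *   let b = mmalloc #UInt32.t #(trivial_preorder UInt32.t) r 0ul 16ul in
 *   ... use b ...
 *   free b
 *)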
(*
* Allocate a hand-managed buffer initialized with contents from src
*
* This allocates and initializes the buffer atomically (from the perspective of the Low* clients)
*)
val mmalloc_and_blit (#a:Type0) (#rrel:srel a) (r:HS.rid)
(#rrel1 #rel1:srel a) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ freeable b})
(requires fun h0 ->
malloc_pre r len /\
live h0 src /\ U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
(*
* See the Allocation comment above when changing the spec
*)
inline_for_extraction
let mmalloc_partial (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer_or_null a rrel rrel (U32.v len) r{(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mmalloc_partial (#a: Type0) (#rrel: srel a) (r: HS.rid) (init: a) (len: U32.t)
: HST.ST (b: lmbuffer_or_null a rrel rrel (U32.v len) r {(not (g_is_null b)) ==> freeable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_partial_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [] | LowStar.Monotonic.Buffer.mmalloc_partial | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer_or_null a rrel rrel (FStar.UInt32.v len) r
{ Prims.op_Negation (LowStar.Monotonic.Buffer.g_is_null b) ==>
LowStar.Monotonic.Buffer.freeable b }) | {
"end_col": 22,
"end_line": 2195,
"start_col": 4,
"start_line": 2195
} |
FStar.Pervasives.Lemma | val length_null_1 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) [SMTPat (length b)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b | val length_null_1 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) [SMTPat (length b)]
let length_null_1 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) [SMTPat (length b)] = | false | null | true | len_null a rrel rel;
null_unique b | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.null_unique",
"Prims.unit",
"LowStar.Monotonic.Buffer.len_null",
"Prims.l_not",
"Prims.eq2",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"Prims.squash",
"Prims.bool",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.nat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val length_null_1 (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false)) [SMTPat (length b)] | [] | LowStar.Monotonic.Buffer.length_null_1 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma (requires ~(LowStar.Monotonic.Buffer.length b == 0))
(ensures LowStar.Monotonic.Buffer.g_is_null b == false)
[SMTPat (LowStar.Monotonic.Buffer.length b)] | {
"end_col": 17,
"end_line": 217,
"start_col": 4,
"start_line": 216
} |
FStar.Pervasives.Lemma | val loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2 | val loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
let loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] = | false | null | true | loc_union_assoc s1 s2 s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return an lseq and remove the length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
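(*
 * Illustrative sketch, not part of this interface (the lemma name below is
 * hypothetical): `get h b i` is definitionally `Seq.index (as_seq h b) i`,
 * so facts about the contents sequence transfer directly:
 *
 *   let get_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem)
 *     (b:mbuffer a rrel rel) (i:nat{i < length b})
 *     : Lemma (get h b i == Seq.index (as_seq h b) i)
 *     = ()   // by unfolding the definition of get
 *)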
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
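(*
 * Illustrative sketch, not part of this interface (the lemma name below is
 * hypothetical): combining len_gsub and as_seq_gsub, a ghost sub-buffer is
 * literally a slice at the spec level, e.g. the middle four cells of a
 * buffer of length at least six:
 *
 *   let as_seq_mid (#a:Type0) (#rrel #rel:srel a) (h:HS.mem)
 *     (b:mbuffer a rrel rel{6 <= length b}) (sub_rel:srel a)
 *     : Lemma (as_seq h (mgsub sub_rel b 2ul 4ul) ==
 *              Seq.slice (as_seq h b) 2 6)
 *     = ()   // follows from as_seq_gsub (via its SMT pattern)
 *)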
/// Two live non-null buffers having the same region and address have
/// elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
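(* Illustrative sketch, a hypothetical lemma that is not part of the original
   interface: with idempotence, commutativity and associativity, duplicate
   components of a union of footprints collapse, in the same spirit as
   ``loc_union_idem_1`` and ``loc_union_idem_2`` right below. *)
let example_loc_union_dedup (l1 l2: loc)
  :Lemma (loc_union (loc_union l1 l2) l1 == loc_union l1 l2)
  = loc_union_comm l1 l2;
    loc_union_assoc l2 l1 l1;
    loc_union_idem l1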
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union_idem_2 (s1 s2: loc)
: Lemma (loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)] | [] | LowStar.Monotonic.Buffer.loc_union_idem_2 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union s1 s2) s2 ==
LowStar.Monotonic.Buffer.loc_union s1 s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union s1 s2) s2)] | {
"end_col": 26,
"end_line": 451,
"start_col": 2,
"start_line": 451
} |
Prims.GTot | val loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) | val loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc
let loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc = | false | null | false | loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b)) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"sometrivial"
] | [
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.loc_addresses",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same preorders).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
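(* Illustrative sketch, a hypothetical helper that is not part of the original
   file: the footprint of only the first two elements of ``b``, e.g. for an
   operation that updates a small header and leaves the rest of the buffer
   untouched. *)
let example_header_footprint (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) :GTot loc
  = loc_buffer_from_to b 0ul 2ul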
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
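(* Illustrative sketch, a hypothetical helper that is not part of the original
   file: a typical footprint for an operation that writes into ``b1`` and
   ``b2`` and deallocates ``b3``, built from the primitives above. *)
let example_footprint (#a:Type0) (#rrel #rel:srel a)
  (b1 b2 b3:mbuffer a rrel rel) :GTot loc
  = loc_union (loc_buffer b1) (loc_union (loc_buffer b2) (loc_addr_of_buffer b3))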
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
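(* Illustrative sketch, a hypothetical helper that is not part of the original
   file: the footprint of every location inside a single region ``r``, e.g. a
   caller-provided scratch region.  The full interface provides this shape
   under the name ``loc_region_only``. *)
let example_single_region_footprint (r: HS.rid) :GTot loc
  = loc_regions false (Set.singleton r)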
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_mreference (#a: Type) (#p: Preorder.preorder a) (b: HS.mreference a p) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_mreference | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: FStar.Monotonic.HyperStack.mreference a p -> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 66,
"end_line": 557,
"start_col": 2,
"start_line": 557
} |
FStar.Pervasives.Lemma | val loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2 | val loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
let loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] = | false | null | true | Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_includes_trans",
"Prims.unit",
"Prims.l_or",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_includes_union_r",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq h b``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer are the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same preorders).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
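(* Illustrative sketch, a hypothetical helper that is not part of the original
   file: the combined footprint of a region ``r1`` taken on its own and a
   region ``r2`` taken together with every region nested inside it. *)
let example_two_region_footprint (r1 r2: HS.rid) :GTot loc
  = loc_union (loc_region_only true r1) (loc_all_regions_from false r2)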
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
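(* Illustrative sketch, a hypothetical lemma that is not part of the original
   file: a union of footprints includes each of its components; here the left
   component, using ``loc_includes_refl`` and ``loc_includes_union_l`` above. *)
let example_union_includes_left (l1 l2: loc)
  :Lemma (loc_includes (loc_union l1 l2) l1)
  = loc_includes_refl l1;
    loc_includes_union_l l1 l2 l1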
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2)) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_union_r' (s s1 s2: loc)
: Lemma (loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))] | [] | LowStar.Monotonic.Buffer.loc_includes_union_r' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s: LowStar.Monotonic.Buffer.loc ->
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_includes s (LowStar.Monotonic.Buffer.loc_union s1 s2) <==>
LowStar.Monotonic.Buffer.loc_includes s s1 /\ LowStar.Monotonic.Buffer.loc_includes s s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_includes s (LowStar.Monotonic.Buffer.loc_union s1 s2))] | {
"end_col": 69,
"end_line": 654,
"start_col": 2,
"start_line": 650
} |
FStar.Pervasives.Lemma | val loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 | val loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
let loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] = | false | null | true | Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Classical.move_requires",
"Prims.l_and",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_disjoint_includes",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_disjoint_union_r",
"Prims.l_True",
"Prims.squash",
"Prims.l_iff",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
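(* Illustrative sketch, not part of the original interface: two memories in
   which a buffer has the same contents agree on every ``get``. The name
   example_get_same_seq is hypothetical and the trivial proof is an untested
   sketch; it relies on ``get`` unfolding to ``Seq.index``. *)
let example_get_same_seq (#a:Type0) (#rrel #rel:srel a)
  (h1 h2:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
  :Lemma (requires (as_seq h1 b == as_seq h2 b))
         (ensures (get h1 b i == get h2 b i))
  = ()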
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
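(* Illustrative sketch, not part of the original interface: the middle-swap
   law for ``loc_union``, derived from the commutativity and associativity
   lemmas above. The name example_loc_union_swap is hypothetical and the proof
   is an untested sketch. *)
let example_loc_union_swap (s1 s2 s3: loc)
  : Lemma (loc_union s1 (loc_union s2 s3) == loc_union s2 (loc_union s1 s3))
  = loc_union_assoc s1 s2 s3;
    loc_union_comm s1 s2;
    loc_union_assoc s2 s1 s3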
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
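(* Illustrative sketch, not part of the original interface: inclusion is
   compatible with ``loc_union``, as claimed in the comment introducing
   ``loc_includes``. The name example_loc_includes_union_mono is hypothetical
   and the proof is an untested sketch assembled only from the lemmas above. *)
let example_loc_includes_union_mono (s1 s2 l: loc)
  : Lemma (requires (loc_includes s1 s2))
          (ensures (loc_includes (loc_union s1 l) (loc_union s2 l)))
  = loc_includes_union_l s1 l s2;
    loc_includes_refl l;
    loc_includes_union_l s1 l l;
    loc_includes_union_r (loc_union s1 l) s2 l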
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then their corresponding sets
/// of memory locations are also related by inclusion.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
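(* Illustrative sketch, not part of the original interface: the address set of
   a buffer without liveness preservation (``loc_addr_of_buffer``) also
   includes the buffer itself. The name example_loc_addr_of_buffer_includes is
   hypothetical and the proof is an untested sketch. *)
let example_loc_addr_of_buffer_includes (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  :Lemma (loc_includes (loc_addr_of_buffer b) (loc_buffer b))
  = loc_includes_addresses_buffer false (frameOf b) (Set.singleton (as_addr b)) b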
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then their corresponding sets of memory locations
/// are also related by inclusion.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``, then
/// their corresponding sets of memory locations are also related by inclusion
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
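(* Illustrative sketch, not part of the original interface: disjointness from
   two locations gives disjointness from their union, with the arguments
   flipped via symmetry. The name example_loc_disjoint_union_flip is
   hypothetical and the proof is an untested sketch using the two lemmas above. *)
let example_loc_disjoint_union_flip (s s1 s2: loc)
  : Lemma (requires (loc_disjoint s s1 /\ loc_disjoint s s2))
          (ensures (loc_disjoint (loc_union s1 s2) s))
  = loc_disjoint_union_r s s1 s2;
    loc_disjoint_sym s (loc_union s1 s2)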
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_union_r' (s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))] | [] | LowStar.Monotonic.Buffer.loc_disjoint_union_r' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
s: LowStar.Monotonic.Buffer.loc ->
s1: LowStar.Monotonic.Buffer.loc ->
s2: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_disjoint s (LowStar.Monotonic.Buffer.loc_union s1 s2) <==>
LowStar.Monotonic.Buffer.loc_disjoint s s1 /\ LowStar.Monotonic.Buffer.loc_disjoint s s2)
[SMTPat (LowStar.Monotonic.Buffer.loc_disjoint s (LowStar.Monotonic.Buffer.loc_union s1 s2))] | {
"end_col": 74,
"end_line": 930,
"start_col": 2,
"start_line": 926
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x | val modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
]
let modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] = | false | null | true | modifies_liveness_insensitive_region loc_none l2 h h' x | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` have the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder (sub_rel) to the sub-buffer,
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
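(* Illustrative sketch, not part of the original interface: splitting a buffer
   at an index ``i`` yields two sub-buffers whose lengths add up to the length
   of the whole buffer. The name example_split_lengths is hypothetical and the
   proof is an untested sketch that instantiates ``len_gsub`` twice. *)
let example_split_lengths (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (i:U32.t{U32.v i <= length b})
  :Lemma (length (mgsub rel b 0ul i) + length (mgsub rel b i (U32.sub (len b) i)) == length b)
  = len_gsub b 0ul i rel;
    len_gsub b i (U32.sub (len b) i) rel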
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
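(* Illustrative sketch, not part of the original interface: ``loc_none`` can
   be dropped from the middle of a union. The name
   example_loc_union_none_middle is hypothetical and the proof is an untested
   sketch that instantiates ``loc_union_loc_none_l``. *)
let example_loc_union_none_middle (s1 s2: loc)
  : Lemma (loc_union s1 (loc_union loc_none s2) == loc_union s1 s2)
  = loc_union_loc_none_l s2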
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
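(* Illustration (hypothetical client-side sketch, not part of this
   interface): the constructors above are typically combined with
   ``loc_union`` to describe the footprint of a function touching, say,
   one buffer and one reference. *)
let example_footprint
  (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  (#t:Type) (#p:Preorder.preorder t) (r:HS.mreference t p)
  : GTot loc
  = loc_union (loc_buffer b) (loc_mreference r)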
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
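(* Illustration (hypothetical client-side sketch, not part of this
   interface): a footprint can always be widened to a larger union; the
   explicit lemma calls avoid relying on the SMT patterns above. *)
let example_includes_widen (s1 s2: loc)
  : Lemma (loc_includes (loc_union s1 s2) s1)
  = loc_includes_refl s1;
    loc_includes_union_l s1 s2 s1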
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
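(* Illustration (hypothetical client-side sketch, not part of this
   interface): a buffer's footprint is included in that of its region;
   since ``loc_region_only`` unfolds to the ``loc_regions`` form used by the
   lemma above, the explicit call below is expected to suffice. *)
let example_buffer_in_its_region (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_region_only true (frameOf b)) (loc_buffer b))
  = loc_includes_region_buffer' b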
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
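(* Illustration (hypothetical client-side sketch, not part of this
   interface): two non-overlapping sub-buffers of the same buffer have
   disjoint footprints; the concrete bounds are chosen arbitrarily. *)
let example_split_disjoint (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (requires (length b == 8))
          (ensures (loc_disjoint (loc_buffer (mgsub rel b 0ul 4ul))
                                 (loc_buffer (mgsub rel b 4ul 4ul))))
  = loc_disjoint_gsub_buffer b 0ul 4ul rel 4ul 4ul rel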
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
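(* Illustration (hypothetical client-side sketch, not part of this
   interface): address sets taken in two different regions are always
   disjoint, whatever the addresses are. *)
let example_different_regions_disjoint (r1 r2:HS.rid) (n1 n2:Set.set nat)
  : Lemma (requires (r1 <> r2))
          (ensures (loc_disjoint (loc_addresses true r1 n1) (loc_addresses true r2 n2)))
  = loc_disjoint_addresses true true r1 r2 n1 n2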
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
 * Same as all_disjoint, retained for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
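(* Illustration (hypothetical client-side sketch, not part of this
   interface): both list-based abbreviations reduce at typechecking time,
   so the definitions below unfold to an explicit union, respectively an
   explicit conjunction of pairwise disjointness facts. *)
let example_union_of_three (l1 l2 l3: loc) : GTot loc =
  loc_union_l [l1; l2; l3]
let example_disjoint_three (l1 l2 l3: loc) : Type0 =
  all_disjoint [l1; l2; l3]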
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
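(* Illustration (hypothetical client-side sketch, not part of this
   interface) of the framing principle stated above: if only ``b1`` is
   modified and ``b2`` is disjoint from it, then ``b2`` keeps its liveness
   and its contents. *)
let example_frame (#a:Type0) (#rrel #rel:srel a)
  (b1 b2:mbuffer a rrel rel) (h h':HS.mem)
  : Lemma (requires (live h b2 /\
                     loc_disjoint (loc_buffer b2) (loc_buffer b1) /\
                     modifies (loc_buffer b1) h h'))
          (ensures (live h' b2 /\ as_seq h b2 == as_seq h' b2))
  = modifies_buffer_elim b2 (loc_buffer b1) h h'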
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then ``modifies s h h`` holds for any
/// set ``s`` of memory locations (and, in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
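(* Illustration (hypothetical client-side sketch, not part of this
   interface): weakening a modifies clause to a larger footprint, with
   explicit lemma calls rather than the SMT patterns above. *)
let example_weaken_modifies (s extra: loc) (h h': HS.mem)
  : Lemma (requires (modifies s h h'))
          (ensures (modifies (loc_union s extra) h h'))
  = loc_includes_refl s;
    loc_includes_union_l s extra s;
    modifies_loc_includes (loc_union s extra) h h' s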
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]; | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_region_weak (l2: loc) (h h': HS.mem) (x: HS.rid)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h x))
(ensures (HS.live_region h' x))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)]
]
] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_weak | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h x)
(ensures FStar.Monotonic.HyperStack.live_region h' x)
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h x)
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h' x)
]
]
] | {
"end_col": 57,
"end_line": 1258,
"start_col": 2,
"start_line": 1258
} |
FStar.Pervasives.Lemma | val loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2 | val loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
]
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] = | false | null | true | loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2 | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"Prims.bool",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Set.set",
"Prims.nat",
"LowStar.Monotonic.Buffer.loc_includes_addresses_addresses",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"FStar.Set.subset",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_addresses",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
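(* Illustration (hypothetical client-side sketch, not part of this
   interface): ``get`` is convenient for stating ghost predicates about
   individual cells, here about the first cell of a buffer of 32-bit
   integers. *)
let example_head_is_zero (#rrel #rel:srel U32.t)
  (h:HS.mem) (b:mbuffer U32.t rrel rel{length b > 0})
  : GTot Type0
  = get h b 0 == 0ul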
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining the sub-buffer-related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` returns the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (sub_rel),
/// provided it is compatible.
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers with the same region and address have
/// elements of the same type (and the same initial preorder).
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
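(* Illustrative sketch (hypothetical name, not part of the original interface):
   instantiating the region-inclusion lemma above with the singleton set of a
   buffer's frame shows that a buffer's footprint sits inside the footprint of
   its own region. *)
let example_buffer_in_its_region (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_region_only false (frameOf b)) (loc_buffer b))
  = loc_includes_region_buffer false (Set.singleton (frameOf b)) b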
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma (requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2)))
[
SMTPat
(loc_includes (loc_addresses preserve_liveness1 r1 s1)
(loc_addresses preserve_liveness2 r2 s2))
] | [] | LowStar.Monotonic.Buffer.loc_includes_addresses_addresses_1 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
preserve_liveness1: Prims.bool ->
preserve_liveness2: Prims.bool ->
r1: FStar.Monotonic.HyperHeap.rid ->
r2: FStar.Monotonic.HyperHeap.rid ->
s1: FStar.Set.set Prims.nat ->
s2: FStar.Set.set Prims.nat
-> FStar.Pervasives.Lemma
(requires r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ FStar.Set.subset s2 s1)
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
s1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 s2))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
s1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 s2))
] | {
"end_col": 81,
"end_line": 834,
"start_col": 2,
"start_line": 834
} |
FStar.Pervasives.Lemma | val fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
= not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1) | val fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)]
let fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] = | false | null | true | not_live_region_loc_not_unused_in_disjoint h0 (HS.get_tip h1) | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.not_live_region_loc_not_unused_in_disjoint",
"FStar.Monotonic.HyperStack.get_tip",
"Prims.unit",
"FStar.Monotonic.HyperStack.fresh_frame",
"Prims.squash",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.Monotonic.Buffer.loc_not_unused_in",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat",
"Prims.logical",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
* Most of the times, they will use wrappers such as buffer, immutable buffer etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
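(* Illustrative sketch, not part of the original interface: with a refinement
   guaranteeing a nonempty buffer, `get` can be used in ghost code without a
   separate precondition proof. The name `example_get_first` is hypothetical. *)
let example_get_first (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel{length b > 0})
  : GTot a
  = get h b 0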
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken with a different preorder than their parent buffers,
/// but we need to ensure that changes to the sub-buffer are compatible with the
/// preorder of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further, clients can attach a preorder to the sub-buffer (``sub_rel``),
/// provided it is compatible (in the sense of ``compatible_sub`` above).
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
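(* Illustrative sketch (hypothetical name, not part of the original interface):
   associativity is not given an SMT pattern, so clients reassociate unions
   explicitly when needed. *)
let example_union_reassoc (s1 s2 s3: loc)
  : Lemma (loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
  = loc_union_assoc s1 s2 s3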
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
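(* Illustrative sketch, not part of the original interface: since every set of
   locations includes `loc_none`, adding the empty set to a footprint is
   harmless for inclusion. The name `example_includes_union_none` is
   hypothetical. *)
let example_includes_union_none (s l: loc)
  : Lemma (requires (loc_includes s l))
          (ensures (loc_includes s (loc_union l loc_none)))
  = loc_includes_none s;
    loc_includes_union_r s l loc_none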
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then so do their
/// corresponding sets of memory locations.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
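(* Illustrative sketch (hypothetical name, not part of the original interface):
   instantiating the lemma above with the buffer's own frame and address gives
   the usual "region and address" over-approximation of a buffer's footprint. *)
let example_buffer_in_addr_footprint (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (loc_includes (loc_addr_of_buffer b) (loc_buffer b))
  = loc_includes_addresses_buffer false (frameOf b) (Set.singleton (as_addr b)) b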
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then so do their corresponding sets of memory
/// locations.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then so do their corresponding sets of memory locations.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
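(* Illustrative sketch, not part of the original interface: disjointness from
   two sets extends to their union, and symmetry lets the result be stated in
   either direction. The name `example_disjoint_union_sym` is hypothetical. *)
let example_disjoint_union_sym (s s1 s2: loc)
  : Lemma (requires (loc_disjoint s s1 /\ loc_disjoint s s2))
          (ensures (loc_disjoint (loc_union s1 s2) s))
  = loc_disjoint_union_r s s1 s2;
    loc_disjoint_sym s (loc_union s1 s2)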
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
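(* Illustrative sketch (hypothetical name, not part of the original interface):
   a specification can state pairwise disjointness of several footprints with a
   single application of the big-operator utility above; it reduces to the
   conjunction of the individual `loc_disjoint` facts at typechecking time. *)
let example_three_way_disjointness (l1 l2 l3: loc) : Type0 =
  loc_pairwise_disjoint [l1; l2; l3]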
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if,
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
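(* Illustrative sketch, not part of the original interface: the standard framing
   step for buffers is a direct application of the elimination lemma above.
   The name `example_frame_buffer` is hypothetical. *)
let example_frame_buffer (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
  : Lemma (requires (live h b /\ loc_disjoint (loc_buffer b) p /\ modifies p h h'))
          (ensures (live h' b /\ as_seq h' b == as_seq h b))
  = modifies_buffer_elim b p h h'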
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then the modifies clause holds for
/// any set of memory locations (in particular, for the empty set ``loc_none``).
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
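(* Illustrative sketch (hypothetical name, not part of the original interface):
   weakening in action: a `modifies loc_none` clause can be traded for a
   `modifies s` clause for any `s`, since every `s` includes `loc_none`. *)
let example_weaken_no_modifies (s: loc) (h h': HS.mem)
  : Lemma (requires (modifies loc_none h h'))
          (ensures (modifies s h h'))
  = loc_includes_none s;
    modifies_loc_includes s h h' loc_none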
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x
/// Modifies clauses are transitive. This lemma is the most general
/// one.
val modifies_trans
(s12: loc)
(h1 h2: HS.mem)
(s23: loc)
(h3: HS.mem)
: Lemma
(requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
(ensures (modifies (loc_union s12 s23) h1 h3))
let modifies_trans_linear (l l_goal:loc) (h1 h2 h3:HS.mem)
: Lemma (requires (modifies l h1 h2 /\ modifies l_goal h2 h3 /\ l_goal `loc_includes` l))
(ensures (modifies l_goal h1 h3))
[SMTPat (modifies l h1 h2); SMTPat (modifies l_goal h1 h3)]
= modifies_trans l h1 h2 l_goal h3
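(* Illustrative sketch, not part of the original interface: transitivity
   composed with commutativity of `loc_union`; the name
   `example_modifies_union_comm` is hypothetical. *)
let example_modifies_union_comm (s12 s23: loc) (h1 h2 h3: HS.mem)
  : Lemma (requires (modifies s12 h1 h2 /\ modifies s23 h2 h3))
          (ensures (modifies (loc_union s23 s12) h1 h3))
  = modifies_trans s12 h1 h2 s23 h3;
    loc_union_comm s12 s23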
/// Regions that are not live can be removed from sets of memory
/// locations that are modified.
val modifies_only_live_regions
(rs: Set.set HS.rid)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_regions false rs) l) h h' /\
(forall r . Set.mem r rs ==> (~ (HS.live_region h r)))
))
(ensures (modifies l h h'))
/// As a consequence, fresh regions can be removed from modifies
/// clauses.
val no_upd_fresh_region: r:HS.rid -> l:loc -> h0:HS.mem -> h1:HS.mem -> Lemma
(requires (HS.fresh_region r h0 h1 /\ modifies (loc_union (loc_all_regions_from false r) l) h0 h1))
(ensures (modifies l h0 h1))
[SMTPat (HS.fresh_region r h0 h1); SMTPat (modifies l h0 h1)]
val new_region_modifies (m0: HS.mem) (r0: HS.rid) (col: option int) : Lemma
(requires (HST.is_eternal_region r0 /\ HS.live_region m0 r0 /\ (None? col \/ HS.is_heap_color (Some?.v col))))
(ensures (
let (_, m1) = HS.new_eternal_region m0 r0 col in
modifies loc_none m0 m1
))
[SMTPat (HS.new_eternal_region m0 r0 col)]
/// Stack discipline: any stack frame (and all its transitively
/// extending regions) that is pushed, modified and popped can be
/// removed from a modifies clause.
/// AR: 01/29/2019: Removing the smt pattern from this lemma.
/// Clients are no longer expected to call it explicitly;
/// if you find yourself having to, please raise an issue.
val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
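(* Illustrative sketch of the intended client shape (hypothetical helper
   `alloca`; not part of the interface proper):
     let client () : HST.Stack unit (requires fun _ -> True)
                                    (ensures fun h0 _ h1 -> modifies loc_none h0 h1)
     = HST.push_frame ();            (* h0 ~> h1: a fresh frame is pushed *)
       let tmp = alloca 0uy 64ul in  (* scratch space lives in the new tip region *)
       ... ;                         (* modifies (loc_all_regions_from false (HS.get_tip h1)) h1 h2 *)
       HST.pop_frame ()              (* h2 ~> h3: the popped frame is erased from
                                        the modifies clause by the lemma above *)
*)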
/// Compatibility lemmas to rescue modifies clauses specified in the
/// standard F* HyperStack library.
val modifies_loc_regions_intro
(rs: Set.set HS.rid)
(h1 h2: HS.mem)
: Lemma
(requires (HS.modifies rs h1 h2))
(ensures (modifies (loc_regions true rs) h1 h2))
val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
/// Modifies clauses for allocating a reference: nothing is
/// modified. (In particular, a modifies clause does not track
/// memory locations that are created.)
val modifies_ralloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(i: HS.rid)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel)
(h' : HS.mem)
: Lemma
(requires (HST.ralloc_post i init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.ralloc_post i init h x h')]
val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
/// Modifies clause for freeing a reference: the address is modified.
val modifies_free
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel { HS.is_mm r } )
(m: HS.mem { m `HS.contains` r } )
: Lemma
(modifies (loc_freed_mreference r) m (HS.free r m))
[SMTPat (HS.free r m)]
/// Another compatibility lemma
val modifies_none_modifies
(h1 h2: HS.mem)
: Lemma
(requires (HST.modifies_none h1 h2))
(ensures (modifies loc_none h1 h2))
[SMTPat (HST.modifies_none h1 h2)]
/// Compatibility with HS.upd
val modifies_upd
(#t: Type) (#pre: Preorder.preorder t)
(r: HS.mreference t pre)
(v: t)
(h: HS.mem)
: Lemma
(requires (HS.contains h r))
(ensures (modifies (loc_mreference r) h (HS.upd h r v)))
[SMTPat (HS.upd h r v)]
/// Introduction lemma for modifying loc_buffer_from_to
val modifies_loc_buffer_from_to_intro
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
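(* Note (illustrative, not part of the interface proper): the intended use is
   to shrink a modifies clause after an operation that only writes the index
   range [from, to) of `b`: one shows that the prefix [0, from) and the suffix
   [to, length b) are unchanged as sequences, and the lemma then replaces
   `loc_buffer b` by `loc_buffer_from_to b from to` in the clause. *)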
/// A memory ``h`` does not contain address ``a`` in region ``r``, denoted
/// ``does_not_contain_addr h (r, a)``, only if either region ``r`` is
/// not live or address ``a`` is unused in region ``r``.
(* BEGIN TODO: move to FStar.Monotonic.HyperStack *)
val does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: GTot Type0
val not_live_region_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (~ (HS.live_region h (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val unused_in_does_not_contain_addr
(h: HS.mem)
(#a: Type)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
: Lemma
(requires (r `HS.unused_in` h))
(ensures (h `does_not_contain_addr` (HS.frameOf r, HS.as_addr r)))
val addr_unused_in_does_not_contain_addr
(h: HS.mem)
(ra: HS.rid * nat)
: Lemma
(requires (HS.live_region h (fst ra) ==> snd ra `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (fst ra))))
(ensures (h `does_not_contain_addr` ra))
val free_does_not_contain_addr
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
HS.is_mm r /\
m `HS.contains` r /\
fst x == HS.frameOf r /\
snd x == HS.as_addr r
))
(ensures (
HS.free r m `does_not_contain_addr` x
))
[SMTPat (HS.free r m `does_not_contain_addr` x)]
val does_not_contain_addr_elim
(#a: Type0)
(#rel: Preorder.preorder a)
(r: HS.mreference a rel)
(m: HS.mem)
(x: HS.rid * nat)
: Lemma
(requires (
m `does_not_contain_addr` x /\
HS.frameOf r == fst x /\
HS.as_addr r == snd x
))
(ensures (~ (m `HS.contains` r)))
(** END TODO *)
/// Addresses that have not been allocated yet can be removed from
/// modifies clauses.
val modifies_only_live_addresses
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h h' : HS.mem)
: Lemma
(requires (
modifies (loc_union (loc_addresses false r a) l) h h' /\
(forall x . Set.mem x a ==> h `does_not_contain_addr` (r, x))
))
(ensures (modifies l h h'))
(* Generic way to ensure that a freshly allocated buffer is disjoint from
   any other object, regardless of how the latter's liveness is defined. *)
val loc_not_unused_in (h: HS.mem) : GTot loc
val loc_unused_in (h: HS.mem) : GTot loc
(* Shortcut notations with more handy names *)
let loc_in (l: loc) (h: HS.mem) =
loc_not_unused_in h `loc_includes` l
let loc_not_in (l: loc) (h: HS.mem) =
loc_unused_in h `loc_includes` l
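(* Illustrative sketch (hypothetical allocator spec, not part of the interface
   proper): these notations make freshness read naturally, e.g.
     (ensures fun h0 b h1 -> live h1 b /\
                             loc_buffer b `loc_not_in` h0 /\ (* unused before *)
                             loc_buffer b `loc_in` h1)       (* allocated after *)
*)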
val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
val loc_unused_in_not_unused_in_disjoint (h: HS.mem) : Lemma
(loc_disjoint (loc_unused_in h) (loc_not_unused_in h))
val not_live_region_loc_not_unused_in_disjoint
(h0: HS.mem)
(r: HS.rid)
: Lemma
(requires (~ (HS.live_region h0 r)))
(ensures (loc_disjoint (loc_region_only false r) (loc_not_unused_in h0)))
let fresh_frame_loc_not_unused_in_disjoint
(h0 h1: HS.mem)
: Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0))) | false | false | LowStar.Monotonic.Buffer.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val fresh_frame_loc_not_unused_in_disjoint (h0 h1: HS.mem)
: Lemma (requires (HS.fresh_frame h0 h1))
(ensures (loc_disjoint (loc_region_only false (HS.get_tip h1)) (loc_not_unused_in h0)))
[SMTPat (HS.fresh_frame h0 h1)] | [] | LowStar.Monotonic.Buffer.fresh_frame_loc_not_unused_in_disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h0: FStar.Monotonic.HyperStack.mem -> h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires FStar.Monotonic.HyperStack.fresh_frame h0 h1)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_region_only false
(FStar.Monotonic.HyperStack.get_tip h1))
(LowStar.Monotonic.Buffer.loc_not_unused_in h0))
[SMTPat (FStar.Monotonic.HyperStack.fresh_frame h0 h1)] | {
"end_col": 63,
"end_line": 1588,
"start_col": 2,
"start_line": 1588
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
= modifies_liveness_insensitive_region_buffer loc_none l2 h h' x | val modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
]
let modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] = | false | null | true | modifies_liveness_insensitive_region_buffer loc_none l2 h h' x | {
"checked_file": "LowStar.Monotonic.Buffer.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "LowStar.Monotonic.Buffer.fsti"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_buffer",
"LowStar.Monotonic.Buffer.loc_none",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.region_liveness_insensitive_locs",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.squash",
"Prims.Cons",
"FStar.Pervasives.pattern",
"FStar.Pervasives.smt_pat_or",
"Prims.list",
"FStar.Pervasives.smt_pat",
"Prims.bool",
"Prims.Nil"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
(* Most comments are taken from the Low* tutorial at:
https://fstarlang.github.io/lowstar/html/LowStar.html
*)
(* Shorthand for preorder over sequences *)
unfold let srel (a:Type0) = Preorder.preorder (Seq.seq a)
(*
* A compatibility relation between preorders of a sequence and its subsequence
*)
[@@"opaque_to_smt"]
unfold
let compatible_subseq_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= (forall (s1 s2:Seq.seq a). {:pattern (rel s1 s2); (sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))} //for any two sequences s1 and s2
(Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2) ==> //of length len, and related by rel
(sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j))) /\ //their slices [i, j) are related by sub_rel
(forall (s s2:Seq.seq a). {:pattern (sub_rel (Seq.slice s i j) s2); (rel s (Seq.replace_subseq s i j s2))} //for any two sequences s and s2
(Seq.length s == len /\ Seq.length s2 == j - i /\ sub_rel (Seq.slice s i j) s2) ==> //such that s has length len and s2 has length (j - i), and the slice [i, j) of s is related to s2 by sub_rel
(rel s (Seq.replace_subseq s i j s2))) //if we replace the slice [i, j) in s by s2, then s and the resulting buffer are related by rel
/// Low* buffers
/// ==============
///
/// The workhorse of Low*, this module allows modeling C arrays on the
/// stack and in the heap. At compilation time, KaRaMeL implements
/// buffers using C arrays, i.e. if Low* type ``t`` is translated into C
/// type ``u``, then Low* type ``buffer t`` is translated to C type ``u*``.
///
/// The type is indexed by two preorders:
/// rrel is the preorder with which the buffer is initially created
/// rel is the preorder of the current buffer (which could be a sub-buffer of the original one)
///
/// The buffer contents are constrained to evolve according to rel
(*
* rrel is part of the type for technical reasons
* If we make it part of the implementation of the buffer type,
* it bumps up the universe of buffer itself by one,
* which is too restrictive (e.g. no buffers of buffers)
*
* We expect that clients will rarely work with this directly
 * Most of the time, they will use wrappers such as buffer, immutable buffer, etc.
*)
val mbuffer (a:Type0) (rrel rel:srel a) :Tot Type0
/// The C ``NULL`` pointer is represented as the Low* ``null`` buffer. For
/// any given type, there is exactly one ``null`` buffer of this type,
/// just like there is exactly one C ``NULL`` pointer of any given type.
///
/// The nullity test ``g_is_null`` is ghost, for proof purposes
/// only. The corresponding stateful nullity test is ``is_null``, see
/// below.
(* FIXME: The nullity test for proof purposes is currently expressed
as a ghost predicate, `g_is_null`, but it is scheduled to be
replaced with equality with `null` *)
val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
val null_unique (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Lemma (g_is_null b <==> b == mnull)
/// ``unused_in b h`` holds only if buffer ``b`` has not been allocated
/// yet.
val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
/// ``live h b`` holds if, and only if, buffer ``b`` is currently
/// allocated in ``h`` and has not been deallocated yet.
///
/// This predicate corresponds to the C notion of "lifetime", and as
/// such, is a prerequisite for all stateful operations on buffers
/// (see below), per the C standard:
///
/// If an object is referred to outside of its lifetime, the
/// behavior is undefined.
///
/// -- ISO/IEC 9899:2011, Section 6.2.4 paragraph 2
///
/// By contrast, it is not required for the ghost versions of those
/// operators.
val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
/// The null pointer is always live.
val live_null (a:Type0) (rrel rel:srel a) (h:HS.mem) :Lemma (live h (mnull #a #rrel #rel))
let live_is_null (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true))
(ensures (live h b))
[SMTPat (live h b)]
= null_unique b;
live_null a rrel rel h
/// A live buffer has already been allocated.
val live_not_unused_in (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h)) (ensures False)
/// If two memories have equal domains, then liveness in one implies liveness in the other
val lemma_live_equal_mem_domains (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h0 h1:HS.mem)
:Lemma (requires (HST.equal_domains h0 h1 /\ live h0 b))
(ensures (live h1 b))
[SMTPat (HST.equal_domains h0 h1); SMTPat (live h1 b)]
(* FIXME: the following definition is necessary to isolate the pattern
because of unification. In other words, if we attached the pattern
to `live_not_unused_in`, then we would not be able to use
`FStar.Classical.forall_intro_`n and
`FStar.Classical.move_requires` due to unification issues. Anyway,
we plan to isolate patterns in a separate module to clean up the Z3
context.
*)
let live_not_unused_in' (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b /\ b `unused_in` h))
(ensures False)
[SMTPat (live h b); SMTPat (b `unused_in` h)]
= live_not_unused_in h b
/// Buffers live in the HyperStack model, which is an extension of
/// the HyperHeap model, a hierarchical memory model that divides the
/// heap into a tree of regions. This coarse-grained separation
/// allows the programmer to state modifies clauses at the level of
/// regions, rather than on individual buffers.
///
/// The HyperHeap memory model is described:
/// - in the 2016 POPL paper: https://www.fstar-lang.org/papers/mumon/
/// - in the relevant section of the F* tutorial: http://www.fstar-lang.org/tutorial/
///
/// ``frameOf b`` returns the identifier of the region in which the
/// buffer ``b`` lives.
val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
/// ``as_addr b`` returns the abstract address of the buffer in its
/// region, as an allocation unit: two buffers that are allocated
/// separately in the same region will actually have different
/// addresses, but a sub-buffer of a buffer will actually have the
/// same address as its enclosing buffer.
val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
/// A buffer is unused if, and only if, its address is unused.
val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
/// If a buffer is live, then so is its region.
val live_region_frameOf (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (requires (live h b))
(ensures (HS.live_region h (frameOf b)))
[SMTPatOr [
[SMTPat (live h b)];
[SMTPat (HS.live_region h (frameOf b))];
]]
/// The length of a buffer ``b`` is available as a machine integer ``len
/// b`` or as a mathematical integer ``length b``, but both in ghost
/// (proof) code only: just like in C, one cannot compute the length
/// of a buffer at run-time.
val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat = U32.v (len b)
/// The null pointer has length 0.
val len_null (a:Type0) (rrel rel:srel a) :Lemma (len (mnull #a #rrel #rel) == 0ul)
let length_null_1 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (length b =!= 0)) (ensures (g_is_null b == false))
[SMTPat (length b)]
= len_null a rrel rel;
null_unique b
let length_null_2 (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (requires (g_is_null b == true)) (ensures (length b == 0))
[SMTPat (g_is_null b)]
= len_null a rrel rel;
null_unique b
/// For functional correctness, buffers are reflected at the proof
/// level using sequences, via ``as_seq b h``, which returns the
/// contents of a given buffer ``b`` in a given heap ``h``. If ``b`` is not
/// live in ``h``, then the result is unspecified.
(* TODO: why not return a lseq and remove length_as_seq lemma? *)
val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
/// The contents of a buffer ``b`` has the same length as ``b`` itself.
val length_as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel)
:Lemma (Seq.length (as_seq h b) == length b)
[SMTPat (Seq.length (as_seq h b))]
/// ``get`` is an often-convenient shorthand to index the value of a
/// given buffer in a given heap, for proof purposes.
let get (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (p:mbuffer a rrel rel) (i:nat)
:Ghost a (requires (i < length p)) (ensures (fun _ -> True))
= Seq.index (as_seq h p) i
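(* Illustrative sketch (not part of the interface proper; the name
   `sketch_get_as_seq` is hypothetical): `get` is definitionally an index into
   `as_seq`, which is convenient for element-wise specifications. *)
let sketch_get_as_seq (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i:nat{i < length b})
  : Lemma (get h b i == Seq.index (as_seq h b) i)
  = ()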
/// Injectivity in the first preorder
val mbuffer_injectivity_in_first_preorder (_:unit)
: Lemma (forall (a:Type0) (rrel1 rrel2 rel1 rel2:srel a)
(b1:mbuffer a rrel1 rel1)
(b2:mbuffer a rrel2 rel2).
rrel1 =!= rrel2 ==> ~ (b1 === b2))
/// Before defining sub-buffer related API, we need to define the notion of "compatibility"
///
///
/// Sub-buffers can be taken at a different preorder than their parent buffers
/// But we need to ensure that the changes to the sub-buffer are compatible with the preorder
/// of the parent buffer, and vice versa.
(*
* The quantifiers are fiercely guarded, so if you are working directly with them,
* you may have to write additional asserts as triggers
*)
[@@"opaque_to_smt"]
unfold let compatible_sub
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t{U32.v i + U32.v len <= length b}) (sub_rel:srel a)
= compatible_subseq_preorder (length b) rel (U32.v i) (U32.v i + U32.v len) sub_rel
/// ``gsub`` is the way to carve a sub-buffer out of a given
/// buffer. ``gsub b i len`` return the sub-buffer of ``b`` starting from
/// offset ``i`` within ``b``, and with length ``len``. Of course ``i`` and
/// ``len`` must fit within the length of ``b``.
///
/// Further the clients can attach a preorder with the subbuffer (sub_rel),
/// provided it is compatible
///
/// ``gsub`` is the ghost version, for proof purposes. Its stateful
/// counterpart is ``sub``, see below.
val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
// goffset
/// A buffer is live exactly at the same time as all of its sub-buffers.
val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
val gsub_is_null (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (g_is_null (mgsub sub_rel b i len) <==> g_is_null b))
[SMTPat (g_is_null (mgsub sub_rel b i len))]
/// The length of a sub-buffer is exactly the one provided at ``gsub``.
val len_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len':U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len' <= length b))
(ensures (len (mgsub sub_rel b i len') == len'))
[SMTPatOr [
[SMTPat (len (mgsub sub_rel b i len'))];
[SMTPat (length (mgsub sub_rel b i len'))];
]]
val frameOf_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (frameOf (mgsub sub_rel b i len) == frameOf b))
[SMTPat (frameOf (mgsub sub_rel b i len))]
val as_addr_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_addr (mgsub sub_rel b i len) == as_addr b))
[SMTPat (as_addr (mgsub sub_rel b i len))]
val mgsub_inj (#a:Type0) (#rrel #rel:srel a) (sub_rel1 sub_rel2:srel a)
(b1 b2:mbuffer a rrel rel)
(i1 i2:U32.t)
(len1 len2:U32.t)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b1 /\
U32.v i2 + U32.v len2 <= length b2 /\
mgsub sub_rel1 b1 i1 len1 === mgsub sub_rel2 b2 i2 len2))
(ensures (len1 == len2 /\ (b1 == b2 ==> i1 == i2) /\ ((i1 == i2 /\ length b1 == length b2) ==> b1 == b2)))
/// Nesting two ``gsub`` collapses into one ``gsub``, transitively.
val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
/// The contents of a sub-buffer is the corresponding slice of the
/// contents of its enclosing buffer.
val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
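(* Note (illustrative, not part of the interface proper): combined with
   `Seq.lemma_index_slice`, this gives the expected pointwise reading of a
   sub-buffer, i.e.
     get h (mgsub sub_rel b i len) j == get h b (U32.v i + j)
   for any j < U32.v len, assuming U32.v i + U32.v len <= length b. *)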
/// Two live non-null buffers having the same region and address have
/// their elements of the same type.
val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
/// # The modifies clause
///
/// The modifies clause for regions, references and buffers.
/// ==========================================================
///
/// This module presents the modifies clause, a way to track the set
/// of memory locations modified by a stateful Low* (or even F*)
/// program. The basic principle of modifies clauses is that any
/// location that is disjoint from a set of memory locations modified
/// by an operation is preserved by that operation.
///
/// We start by specifying a monoid of sets of memory locations. From
/// a rough high-level view, ``loc`` is the type of sets of memory
/// locations, equipped with an identity element ``loc_none``,
/// representing the empty set, and an associative and commutative
/// operator, ``loc_union``, representing the union of two sets of
/// memory locations.
///
/// Moreover, ``loc_union`` is idempotent, which is useful to cut SMT
/// matching loops with ``modifies_trans`` and ``modifies_refl`` below.
val loc : Type0
val loc_none: loc
val loc_union
(s1 s2: loc)
: GTot loc
val loc_union_idem
(s: loc)
: Lemma
(loc_union s s == s)
[SMTPat (loc_union s s)]
val loc_union_comm
(s1 s2: loc)
: Lemma
(loc_union s1 s2 == loc_union s2 s1)
[SMTPat (loc_union s1 s2)]
val loc_union_assoc
(s1 s2 s3: loc)
: Lemma
(loc_union s1 (loc_union s2 s3) == loc_union (loc_union s1 s2) s3)
let loc_union_idem_1
(s1 s2: loc)
: Lemma
(loc_union s1 (loc_union s1 s2) == loc_union s1 s2)
[SMTPat (loc_union s1 (loc_union s1 s2))]
= loc_union_assoc s1 s1 s2
let loc_union_idem_2
(s1 s2: loc)
: Lemma
(loc_union (loc_union s1 s2) s2 == loc_union s1 s2)
[SMTPat (loc_union (loc_union s1 s2) s2)]
= loc_union_assoc s1 s2 s2
val loc_union_loc_none_l
(s: loc)
: Lemma
(loc_union loc_none s == s)
[SMTPat (loc_union loc_none s)]
val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
/// ``loc_buffer b`` is the set of memory locations associated to a buffer ``b``.
val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
val loc_buffer_eq (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : Lemma
(loc_buffer b == loc_buffer_from_to b 0ul (len b))
val loc_buffer_from_to_high (#a: Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (length b <= U32.v to))
(ensures (loc_buffer_from_to b from to == loc_buffer_from_to b from (len b)))
val loc_buffer_from_to_none (#a: Type) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (from to: U32.t)
: Lemma
(requires (g_is_null b \/ length b < U32.v from \/ U32.v to < U32.v from))
(ensures (loc_buffer_from_to b from to == loc_none))
val loc_buffer_from_to_mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
(from to: U32.t)
: Lemma
(requires (
U32.v i + U32.v len <= length b /\
U32.v from <= U32.v to /\ U32.v to <= U32.v len
))
(ensures (
loc_buffer_from_to (mgsub sub_rel b i len) from to == loc_buffer_from_to b (i `U32.add` from) (i `U32.add` to)
))
val loc_buffer_mgsub_eq (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub sub_rel b i len) == loc_buffer_from_to b i (i `U32.add` len)))
val loc_buffer_null (a:Type0) (rrel rel:srel a)
:Lemma (loc_buffer (mnull #a #rrel #rel) == loc_none)
[SMTPat (loc_buffer (mnull #a #rrel #rel))]
val loc_buffer_from_to_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(requires (U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (loc_buffer_from_to b from to == loc_buffer (mgsub rel b from (to `U32.sub` from))))
[SMTPat (loc_buffer_from_to b from to)]
val loc_buffer_mgsub_rel_eq
(#a:Type0) (#rrel #rel:srel a)
(b: mbuffer a rrel rel)
(rel1 rel2: srel a)
(i len: U32.t)
: Lemma
(requires (U32.v i + U32.v len <= length b))
(ensures (loc_buffer (mgsub rel1 b i len) == loc_buffer (mgsub rel2 b i len)))
[SMTPat (loc_buffer (mgsub rel1 b i len)); SMTPat (loc_buffer (mgsub rel2 b i len))]
/// ``loc_addresses r n`` is the set of memory locations associated to a
/// set of addresses ``n`` in a given region ``r``.
val loc_addresses
(preserve_liveness: bool)
(r: HS.rid)
(n: Set.set nat)
: GTot loc
unfold let loc_addr_of_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc =
loc_addresses false (frameOf b) (Set.singleton (as_addr b))
/// ``loc_regions r`` is the set of memory locations associated to a set
/// ``r`` of regions.
val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
/// ``loc_mreference b`` is the set of memory locations associated to a
/// reference ``b``, which is actually the set of memory locations
/// associated to the address of ``b``.
unfold
let loc_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses true (HS.frameOf b) (Set.singleton (HS.as_addr b))
unfold
let loc_freed_mreference
(#a: Type)
(#p: Preorder.preorder a)
(b: HS.mreference a p)
: GTot loc
= loc_addresses false (HS.frameOf b) (Set.singleton (HS.as_addr b))
/// ``loc_region_only r`` is the set of memory locations associated to a
/// region ``r`` but not any region ``r'`` that extends ``r`` (in the sense
/// of ``FStar.HyperStack.extends``.)
unfold
let loc_region_only
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (Set.singleton r)
/// ``loc_all_regions_from r`` is the set of all memory locations
/// associated to a region ``r`` and any region ``r'`` that transitively
/// extends ``r`` (in the sense of ``FStar.HyperStack.extends``,
/// e.g. nested stack frames.)
unfold
let loc_all_regions_from
(preserve_liveness: bool)
(r: HS.rid)
: GTot loc
= loc_regions preserve_liveness (HS.mod_set (Set.singleton r))
/// We equip the ``loc`` monoid of sets of memory locations with an
/// inclusion relation, ``loc_includes``, which is a preorder compatible
/// with ``loc_union``. Although we consider sets of memory locations,
/// we do not specify them using any F* set library such as
/// ``FStar.Set``, ``FStar.TSet`` or ``FStar.GSet``, because ``loc_includes``
/// encompasses more than just set-theoretic inclusion.
val loc_includes
(s1 s2: loc)
: GTot Type0
val loc_includes_refl
(s: loc)
: Lemma
(loc_includes s s)
[SMTPat (loc_includes s s)]
val loc_includes_trans
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
let loc_includes_trans_backwards
(s1 s2 s3: loc)
: Lemma
(requires (loc_includes s1 s2 /\ loc_includes s2 s3))
(ensures (loc_includes s1 s3))
[SMTPat (loc_includes s1 s3); SMTPat (loc_includes s2 s3)]
= loc_includes_trans s1 s2 s3
val loc_includes_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_includes s s1 /\ loc_includes s s2))
(ensures (loc_includes s (loc_union s1 s2)))
val loc_includes_union_l
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
let loc_includes_union_l'
(s1 s2 s: loc)
: Lemma
(requires (loc_includes s1 s \/ loc_includes s2 s))
(ensures (loc_includes (loc_union s1 s2) s))
[SMTPat (loc_includes (loc_union s1 s2) s)]
= loc_includes_union_l s1 s2 s
let loc_includes_union_r'
(s s1 s2: loc)
: Lemma
(loc_includes s (loc_union s1 s2) <==> (loc_includes s s1 /\ loc_includes s s2))
[SMTPat (loc_includes s (loc_union s1 s2))]
= Classical.move_requires (loc_includes_union_r s s1) s2;
Classical.move_requires (loc_includes_union_l s1 s2) s1;
Classical.move_requires (loc_includes_union_l s1 s2) s2;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s1;
Classical.move_requires (loc_includes_trans s (loc_union s1 s2)) s2
val loc_includes_none
(s: loc)
: Lemma
(loc_includes s loc_none)
[SMTPat (loc_includes s loc_none)]
/// If a buffer ``b1`` includes a buffer ``b2`` in the sense of the buffer
/// theory (see ``LowStar.Buffer.includes``), then the set of memory locations
/// of ``b1`` includes that of ``b2``.
val loc_includes_gsub_buffer_r
(l:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
: Lemma (requires (UInt32.v i + UInt32.v len <= (length b) /\
loc_includes l (loc_buffer b)))
(ensures (loc_includes l (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (loc_includes l (loc_buffer (mgsub sub_rel b i len)))]
let loc_includes_gsub_buffer_r' (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:UInt32.t) (len:UInt32.t) (sub_rel:srel a)
:Lemma (requires (UInt32.v i + UInt32.v len <= (length b)))
(ensures (loc_includes (loc_buffer b) (loc_buffer (mgsub sub_rel b i len))))
[SMTPat (mgsub sub_rel b i len)]
= ()
val loc_includes_gsub_buffer_l (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i1 <= UInt32.v i2 /\ UInt32.v i2 + UInt32.v len2 <= UInt32.v i1 + UInt32.v len1
))
(ensures (loc_includes (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
val loc_includes_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v from1 <= U32.v from2 /\ U32.v to2 <= U32.v to1))
(ensures (loc_includes (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
/// If the contents of a buffer are equal in two given heaps, then so
/// are the contents of any of its sub-buffers.
val loc_includes_as_seq (#a:Type0) (#rrel #rel1 #rel2:srel a)
(h1 h2:HS.mem) (larger:mbuffer a rrel rel1) (smaller:mbuffer a rrel rel2)
:Lemma (requires (loc_includes (loc_buffer larger) (loc_buffer smaller) /\
as_seq h1 larger == as_seq h2 larger /\
(live h1 larger \/ live h1 smaller) /\ (live h2 larger \/ live h2 smaller)))
(ensures (as_seq h1 smaller == as_seq h2 smaller))
/// Given a buffer ``b``, if its address is in a set ``s`` of addresses in
/// the region of ``b``, then the set of memory locations corresponding
/// to ``b`` is included in the set of memory locations corresponding to
/// the addresses in ``s`` in region ``r``.
///
/// In particular, the set of memory locations corresponding to a
/// buffer is included in the set of memory locations corresponding to
/// its region and address.
val loc_includes_addresses_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (r:HS.rid) (s:Set.set nat) (p:mbuffer a rrel rel)
:Lemma (requires (frameOf p == r /\ Set.mem (as_addr p) s))
(ensures (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p)))
[SMTPat (loc_includes (loc_addresses preserve_liveness r s) (loc_buffer p))]
let loc_includes_addresses_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_addresses true (frameOf b) (Set.singleton (as_addr b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// The set of memory locations corresponding to a buffer is included
/// in the set of memory locations corresponding to its region.
val loc_includes_region_buffer (#a:Type0) (#rrel #rel:srel a)
(preserve_liveness:bool) (s:Set.set HS.rid) (b:mbuffer a rrel rel)
:Lemma (requires (Set.mem (frameOf b) s))
(ensures (loc_includes (loc_regions preserve_liveness s) (loc_buffer b)))
[SMTPat (loc_includes (loc_regions preserve_liveness s) (loc_buffer b))]
let loc_includes_region_buffer' (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (loc_includes (loc_regions true (Set.singleton (frameOf b))) (loc_buffer b))
[SMTPat (loc_buffer b)]
= ()
/// If a region ``r`` is in a set of regions ``s``, then the set of memory
/// locations corresponding to a set of addresses ``a`` in ``r`` is
/// included in the set of memory locations corresponding to the
/// regions in ``s``.
///
/// In particular, the set of memory locations corresponding to a
/// set of addresses ``a`` in a given region ``r`` is included in the set
/// of memory locations corresponding to region ``r``.
val loc_includes_region_addresses
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s: Set.set HS.rid)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (Set.mem r s))
(ensures (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s) (loc_addresses preserve_liveness2 r a))]
let loc_includes_region_addresses'
(preserve_liveness: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(loc_includes (loc_regions true (Set.singleton r)) (loc_addresses preserve_liveness r a))
[SMTPat (loc_addresses preserve_liveness r a)]
= ()
/// If a set of region identifiers ``s1`` includes a set of region
/// identifiers ``s2``, then the set of memory locations corresponding to
/// ``s1`` includes the one corresponding to ``s2``.
val loc_includes_region_region
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(s1 s2: Set.set HS.rid)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2)))
[SMTPat (loc_includes (loc_regions preserve_liveness1 s1) (loc_regions preserve_liveness2 s2))]
let loc_includes_region_region'
(preserve_liveness: bool)
(s: Set.set HS.rid)
: Lemma
(loc_includes (loc_regions false s) (loc_regions preserve_liveness s))
[SMTPat (loc_regions preserve_liveness s)]
= ()
/// The following lemma can act as a cut when reasoning with sets of
/// memory locations corresponding to sets of regions.
val loc_includes_region_union_l
(preserve_liveness: bool)
(l: loc)
(s1 s2: Set.set HS.rid)
: Lemma
(requires (loc_includes l (loc_regions preserve_liveness (Set.intersect s2 (Set.complement s1)))))
(ensures (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2)))
[SMTPat (loc_includes (loc_union (loc_regions preserve_liveness s1) l) (loc_regions preserve_liveness s2))]
/// If a set of addresses ``s1`` includes a set of addresses ``s2``,
/// then the corresponding memory locations of ``s1`` include those of ``s2``.
val loc_includes_addresses_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires ((preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r s1) (loc_addresses preserve_liveness2 r s2)))
let loc_includes_addresses_addresses_1
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(s1 s2: Set.set nat)
: Lemma
(requires (r1 == r2 /\ (preserve_liveness1 ==> preserve_liveness2) /\ Set.subset s2 s1))
(ensures (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2)))
[SMTPat (loc_includes (loc_addresses preserve_liveness1 r1 s1) (loc_addresses preserve_liveness2 r2 s2))]
= loc_includes_addresses_addresses preserve_liveness1 preserve_liveness2 r1 s1 s2
let loc_includes_addresses_addresses_2
(preserve_liveness: bool)
(r: HS.rid)
(s: Set.set nat)
: Lemma
(loc_includes (loc_addresses false r s) (loc_addresses preserve_liveness r s))
[SMTPat (loc_addresses preserve_liveness r s)]
= ()
/// Patterns with loc_includes, union on the left
let loc_includes_union_l_buffer
(s1 s2:loc)
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
:Lemma (requires (loc_includes s1 (loc_buffer b) \/ loc_includes s2 (loc_buffer b)))
(ensures (loc_includes (loc_union s1 s2) (loc_buffer b)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_buffer b))]
= loc_includes_union_l s1 s2 (loc_buffer b)
let loc_includes_union_l_addresses
(s1 s2: loc)
(prf: bool)
(r: HS.rid)
(a: Set.set nat)
: Lemma
(requires (loc_includes s1 (loc_addresses prf r a) \/ loc_includes s2 (loc_addresses prf r a)))
(ensures (loc_includes (loc_union s1 s2) (loc_addresses prf r a)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_addresses prf r a))]
= loc_includes_union_l s1 s2 (loc_addresses prf r a)
let loc_includes_union_l_regions
(s1 s2: loc)
(prf: bool)
(r: Set.set HS.rid)
: Lemma
(requires (loc_includes s1 (loc_regions prf r) \/ loc_includes s2 (loc_regions prf r)))
(ensures (loc_includes (loc_union s1 s2) (loc_regions prf r)))
[SMTPat (loc_includes (loc_union s1 s2) (loc_regions prf r))]
= loc_includes_union_l s1 s2 (loc_regions prf r)
/// Since inclusion encompasses more than just set-theoretic
/// inclusion, we also need to specify disjointness accordingly, as a
/// symmetric relation compatible with union.
val loc_disjoint
(s1 s2: loc)
: GTot Type0
val loc_disjoint_sym
(s1 s2: loc)
: Lemma
(requires (loc_disjoint s1 s2))
(ensures (loc_disjoint s2 s1))
let loc_disjoint_sym'
(s1 s2: loc)
: Lemma
(loc_disjoint s1 s2 <==> loc_disjoint s2 s1)
[SMTPat (loc_disjoint s1 s2)]
= Classical.move_requires (loc_disjoint_sym s1) s2;
Classical.move_requires (loc_disjoint_sym s2) s1
val loc_disjoint_none_r
(s: loc)
: Lemma
(ensures (loc_disjoint s loc_none))
[SMTPat (loc_disjoint s loc_none)]
val loc_disjoint_union_r
(s s1 s2: loc)
: Lemma
(requires (loc_disjoint s s1 /\ loc_disjoint s s2))
(ensures (loc_disjoint s (loc_union s1 s2)))
/// If two sets of memory locations are disjoint, then so are any two
/// included sets of memory locations.
val loc_disjoint_includes
(p1 p2 p1' p2' : loc)
: Lemma
(requires (loc_includes p1 p1' /\ loc_includes p2 p2' /\ loc_disjoint p1 p2))
(ensures (loc_disjoint p1' p2'))
let loc_disjoint_union_r'
(s s1 s2: loc)
: Lemma
(ensures (loc_disjoint s (loc_union s1 s2) <==> (loc_disjoint s s1 /\ loc_disjoint s s2)))
[SMTPat (loc_disjoint s (loc_union s1 s2))]
= Classical.move_requires (loc_disjoint_union_r s s1) s2;
loc_includes_union_l s1 s2 s1;
loc_includes_union_l s1 s2 s2;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s1;
Classical.move_requires (loc_disjoint_includes s (loc_union s1 s2) s) s2
let loc_disjoint_includes_r (b1 : loc) (b2 b2': loc) : Lemma
(requires (loc_includes b2 b2' /\ loc_disjoint b1 b2))
(ensures (loc_disjoint b1 b2'))
[SMTPat (loc_disjoint b1 b2'); SMTPat (loc_includes b2 b2')]
= loc_disjoint_includes b1 b2 b1 b2'
val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
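(* Note (illustrative, not part of the interface proper): this is the key fact
   behind the usual "split" pattern: the ranges [0, mid) and [mid, length b) of
   the same buffer are disjoint locations, so two computations writing the two
   halves can be framed against each other. *)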
/// If two sets of addresses correspond to different regions or are
/// disjoint, then their corresponding sets of memory locations are
/// disjoint.
val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
/// If two sets of region identifiers are disjoint, then so are their
/// corresponding sets of memory locations.
val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
/// Some utilities to work with lists of buffers and locs
(* buf_t is a `buffer` at some type `a` *)
let buf_t = a:Type0 & rrel:srel a & rel:srel a & mbuffer a rrel rel
(* A convenience to construct a buf_t *)
[@@BigOps.__reduce__]
let buf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) : buf_t = (|a, rrel, rel, b|)
(* A conjunction of liveness conditions on the buffers in `l`
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_live (h:HS.mem) (l:list buf_t) : Type0 =
BigOps.big_and #buf_t (fun (| _, _, _, b |) -> live h b) l
(* Pairwise disjointness of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let all_disjoint (l:list loc) : Type0 =
BigOps.pairwise_and loc_disjoint l
(* Union of a list of locations;
Implicitly reduced at typechecking time *)
[@@"opaque_to_smt"]
unfold
let loc_union_l (l:list loc) =
BigOps.normal (List.Tot.fold_right_gtot l loc_union loc_none)
(*
* Same as all_disjoint, retaining for backward compatibility
*)
[@@"opaque_to_smt"]
unfold
let loc_pairwise_disjoint (l:list loc) :Type0 = BigOps.pairwise_and loc_disjoint l
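(* Illustrative sketch (hypothetical buffers b1, b2, b3; not part of the
   interface proper): a typical multi-buffer precondition written with the
   helpers above:
     all_live h [buf b1; buf b2; buf b3] /\
     all_disjoint [loc_buffer b1; loc_buffer b2; loc_buffer b3]
   Both conjuncts reduce at typechecking time to the expected conjunction of
   `live` and pairwise `loc_disjoint` facts. *)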
/// The modifies clauses proper.
///
/// Let ``s`` be a set of memory locations, and ``h1`` and ``h2`` be two
/// memory states. Then, ``s`` is modified from ``h1`` to ``h2`` only if
/// any memory location disjoint from ``s`` is preserved from ``h1`` into
/// ``h2``. Elimination lemmas illustrating this principle follow.
val modifies
(s: loc)
(h1 h2: HS.mem)
: GTot Type0
/// If a region ``r`` is disjoint from a set ``s`` of memory locations
/// which is modified, then its liveness is preserved.
val modifies_live_region
(s: loc)
(h1 h2: HS.mem)
(r: HS.rid)
: Lemma
(requires (modifies s h1 h2 /\ loc_disjoint s (loc_region_only false r) /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
[SMTPatOr [
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h1 r)];
[SMTPat (modifies s h1 h2); SMTPat (HS.live_region h2 r)];
]]
/// If a reference ``b`` is disjoint from a set ``p`` of memory locations
/// which is modified, then its liveness and contents are preserved.
val modifies_mreference_elim
(#t: Type)
(#pre: Preorder.preorder t)
(b: HS.mreference t pre)
(p: loc)
(h h': HS.mem)
: Lemma
(requires (
loc_disjoint (loc_mreference b) p /\
HS.contains h b /\
modifies p h h'
))
(ensures (
HS.contains h' b /\
HS.sel h b == HS.sel h' b
))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (HS.sel h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h b) ];
[ SMTPat (modifies p h h'); SMTPat (HS.sel h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (HS.contains h' b) ]
] ]
/// If a buffer ``b`` is disjoint from a set ``p`` of
/// memory locations which is modified, then its liveness and contents
/// are preserved.
val modifies_buffer_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer b) p /\ live h b /\ modifies p h h'))
(ensures (live h' b /\ (as_seq h b == as_seq h' b)))
[SMTPatOr [
[ SMTPat (modifies p h h'); SMTPat (as_seq h b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h b) ];
[ SMTPat (modifies p h h'); SMTPat (as_seq h' b) ] ;
[ SMTPat (modifies p h h'); SMTPat (live h' b) ]
]]
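(* Note (illustrative, not part of the interface proper): this is the workhorse
   of framing in client proofs. If a call reports
   `modifies (loc_buffer b1) h0 h1` and `loc_disjoint (loc_buffer b1)
   (loc_buffer b2)` is known, then `live h1 b2 /\ as_seq h1 b2 == as_seq h0 b2`
   follows automatically through the SMT patterns above. *)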
val modifies_buffer_from_to_elim (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (p:loc) (h h':HS.mem)
:Lemma (requires (loc_disjoint (loc_buffer_from_to b from to) p /\ live h b /\ modifies p h h' /\ U32.v from <= U32.v to /\ U32.v to <= length b))
(ensures (live h' b /\ Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to)))
/// If the memory state does not change, then any memory location is
/// modified (and, in particular, the empty set, ``loc_none``.)
val modifies_refl
(s: loc)
(h: HS.mem)
: Lemma
(modifies s h h)
[SMTPat (modifies s h h)]
/// If a set ``s2`` of memory locations is modified, then so is any set
/// ``s1`` that includes ``s2``. In other words, it is always possible to
/// weaken a modifies clause by widening its set of memory locations.
val modifies_loc_includes
(s1: loc)
(h h': HS.mem)
(s2: loc)
: Lemma
(requires (modifies s2 h h' /\ loc_includes s1 s2))
(ensures (modifies s1 h h'))
[SMTPat (modifies s1 h h'); SMTPat (modifies s2 h h')]
/// Some memory locations are tagged as liveness-insensitive: the
/// liveness preservation of a memory location only depends on its
/// disjointness from the liveness-sensitive memory locations of a
/// modifies clause.
val address_liveness_insensitive_locs: loc
val region_liveness_insensitive_locs: loc
val address_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val address_liveness_insensitive_addresses (r: HS.rid) (a: Set.set nat) : Lemma
(address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))
[SMTPat (address_liveness_insensitive_locs `loc_includes` (loc_addresses true r a))]
val region_liveness_insensitive_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_buffer b))]
val region_liveness_insensitive_addresses (preserve_liveness: bool) (r: HS.rid) (a: Set.set nat) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_addresses preserve_liveness r a))]
val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
val region_liveness_insensitive_address_liveness_insensitive:
squash (region_liveness_insensitive_locs `loc_includes` address_liveness_insensitive_locs)
val modifies_liveness_insensitive_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ address_liveness_insensitive_locs `loc_includes` l2 /\ h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
(* TODO: pattern *)
val modifies_liveness_insensitive_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\
loc_disjoint l1 (loc_buffer x) /\
address_liveness_insensitive_locs `loc_includes` l2 /\
live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies (loc_union l1 l2) h h');];
[SMTPat (live h' x); SMTPat (modifies (loc_union l1 l2) h h');];
]]
let modifies_liveness_insensitive_mreference_weak
(l : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l h h' /\
address_liveness_insensitive_locs `loc_includes` l /\
h `HS.contains` x))
(ensures (h' `HS.contains` x))
[SMTPatOr [
[SMTPat (h `HS.contains` x); SMTPat (modifies l h h');];
[SMTPat (h' `HS.contains` x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_mreference loc_none l h h' x
let modifies_liveness_insensitive_buffer_weak
(l:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l h h' /\ address_liveness_insensitive_locs `loc_includes` l /\ live h x))
(ensures (live h' x))
[SMTPatOr [
[SMTPat (live h x); SMTPat (modifies l h h');];
[SMTPat (live h' x); SMTPat (modifies l h h');];
]]
= modifies_liveness_insensitive_buffer loc_none l h h' x
val modifies_liveness_insensitive_region
(l1 l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_region_only false x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' x)];
]]
val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_weak
(l2 : loc)
(h h' : HS.mem)
(x: HS.rid)
: Lemma
(requires (modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h x))
(ensures (HS.live_region h' x))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h x)];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' x)];
]]
= modifies_liveness_insensitive_region loc_none l2 h h' x
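(* A hedged usage sketch with a hypothetical name (example_region_stays_live):
   the same idea at the region level, using region_liveness_insensitive_locs. *)
let example_region_stays_live (r:HS.rid) (l:loc) (h h':HS.mem)
  : Lemma (requires (HS.live_region h r /\
                     modifies l h h' /\
                     region_liveness_insensitive_locs `loc_includes` l))
          (ensures (HS.live_region h' r))
  = modifies_liveness_insensitive_region_weak l h h' r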
let modifies_liveness_insensitive_region_mreference_weak
(l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
= modifies_liveness_insensitive_region_mreference loc_none l2 h h' x
let modifies_liveness_insensitive_region_buffer_weak
(l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies l2 h h' /\
region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))];
interleaved: false | is_simply_typed: false | file_name: LowStar.Monotonic.Buffer.fsti | vconfig: {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | is_simple_lemma: null | source_type: val modifies_liveness_insensitive_region_buffer_weak
(l2: loc)
(h h': HS.mem)
(#a: Type0)
(#rrel #rel: srel a)
(x: mbuffer a rrel rel)
: Lemma
(requires
(modifies l2 h h' /\ region_liveness_insensitive_locs `loc_includes` l2 /\
HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[
SMTPatOr
[
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies l2 h h'); SMTPat (HS.live_region h' (frameOf x))]
]
] | proof_features: [] | name: LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_buffer_weak | source: {
"file_name": "ulib/LowStar.Monotonic.Buffer.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | verbose_type:
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies l2 h h' /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies l2 h h');
SMTPat (FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x)
)
]
]
] | source_range: {
"end_col": 66,
"end_line": 1289,
"start_col": 4,
"start_line": 1289
} |