effect stringclasses 48 values | original_source_type stringlengths 0-23k | opens_and_abbrevs listlengths 2-92 | isa_cross_project_example bool 1 class | source_definition stringlengths 9-57.9k | partial_definition stringlengths 7-23.3k | is_div bool 2 classes | is_type null | is_proof bool 2 classes | completed_definiton stringlengths 1-250k | dependencies dict | effect_flags sequencelengths 0-2 | ideal_premises sequencelengths 0-236 | mutual_with sequencelengths 0-11 | file_context stringlengths 0-407k | interleaved bool 1 class | is_simply_typed bool 2 classes | file_name stringlengths 5-48 | vconfig dict | is_simple_lemma null | source_type stringlengths 10-23k | proof_features sequencelengths 0-1 | name stringlengths 8-95 | source dict | verbose_type stringlengths 1-7.42k | source_range dict |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Prims.GTot | val stack_of_args
(max_arity n: nat)
(rsp: int)
(args: arg_list{List.Tot.length args = n})
(st: Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1 | val stack_of_args
(max_arity n: nat)
(rsp: int)
(args: arg_list{List.Tot.length args = n})
(st: Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8)
let rec stack_of_args
(max_arity n: nat)
(rsp: int)
(args: arg_list{List.Tot.length args = n})
(st: Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) = | false | null | false | match args with
| [] -> st
| hd :: tl ->
if n <= max_arity
then st
else
let ptr = ((n - max_arity) - 1) * 8 + (if IA.win then 32 else 0) + 8 + rsp in
let st1 = stack_of_args max_arity (n - 1) rsp tl st in
let v = arg_as_nat64 hd in
BS.update_heap64 ptr v st1 | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"sometrivial"
] | [
"Prims.nat",
"Prims.int",
"Vale.Interop.X64.arg_list",
"Prims.b2t",
"Prims.op_Equality",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.arg",
"FStar.Map.t",
"Vale.Def.Words_s.nat8",
"Prims.list",
"Prims.op_LessThanOrEqual",
"Prims.bool",
"Vale.Arch.MachineHeap_s.update_heap64",
"Vale.Def.Words_s.nat64",
"Vale.Interop.X64.arg_as_nat64",
"Vale.Interop.X64.stack_of_args",
"Prims.op_Subtraction",
"Prims.op_Addition",
"FStar.Mul.op_Star",
"Vale.Interop.Assumptions.win"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited,
with room left over for extra arguments and the extra slots needed.
Note that this number can be increased if needed *)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stack_of_args
(max_arity n: nat)
(rsp: int)
(args: arg_list{List.Tot.length args = n})
(st: Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) | [
"recursion"
] | Vale.Interop.X64.stack_of_args | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
max_arity: Prims.nat ->
n: Prims.nat ->
rsp: Prims.int ->
args: Vale.Interop.X64.arg_list{FStar.List.Tot.Base.length args = n} ->
st: FStar.Map.t Prims.int Vale.Def.Words_s.nat8
-> Prims.GTot (FStar.Map.t Prims.int Vale.Def.Words_s.nat8) | {
"end_col": 32,
"end_line": 136,
"start_col": 2,
"start_line": 124
} |
FStar.Pervasives.Lemma | val taint_arg_args_b8_mem (args: arg_list) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==> List.memP (taint_arg_b8 a) (args_b8 args)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a | val taint_arg_args_b8_mem (args: arg_list) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==> List.memP (taint_arg_b8 a) (args_b8 args))
let rec taint_arg_args_b8_mem (args: arg_list) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==> List.memP (taint_arg_b8 a) (args_b8 args)) = | false | null | true | match args with
| [] -> ()
| hd :: tl -> taint_arg_args_b8_mem tl a | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"lemma"
] | [
"Vale.Interop.X64.arg_list",
"Vale.Interop.Base.arg",
"Prims.list",
"Vale.Interop.X64.taint_arg_args_b8_mem",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.l_imp",
"Prims.l_and",
"FStar.List.Tot.Base.memP",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Vale.Arch.HeapTypes_s.taint",
"Vale.Interop.X64.taint_of_arg",
"Vale.Interop.Types.b8",
"Vale.Interop.X64.taint_arg_b8",
"Vale.Interop.Base.args_b8",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited,
with room left over for extra arguments and the extra slots needed.
Note that this number can be increased if needed *)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
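// A worked instance of the offset computation above (an illustrative sketch, assuming the
// Windows convention where max_arity = 4): the fifth argument (n = 5) is written at
// ((5 - 4) - 1) * 8 + 32 + 8 + rsp = rsp + 40, i.e. just past the return address and the
// 32-byte shadow space, and the sixth argument (n = 6) lands one slot higher, at rsp + 48.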
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==> | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val taint_arg_args_b8_mem (args: arg_list) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==> List.memP (taint_arg_b8 a) (args_b8 args)) | [
"recursion"
] | Vale.Interop.X64.taint_arg_args_b8_mem | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | args: Vale.Interop.X64.arg_list -> a: Vale.Interop.Base.arg
-> FStar.Pervasives.Lemma
(ensures
FStar.List.Tot.Base.memP a args /\ Some? (Vale.Interop.X64.taint_of_arg a) ==>
FStar.List.Tot.Base.memP (Vale.Interop.X64.taint_arg_b8 a) (Vale.Interop.Base.args_b8 args)) | {
"end_col": 32,
"end_line": 184,
"start_col": 4,
"start_line": 181
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_lowstar_sig_t_weak
(n:nat{n <= 20})
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:list arg{List.length args + List.length dom <= n})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
as_lowstar_sig_t_weak' n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel predict | let as_lowstar_sig_t_weak
(n: nat{n <= 20})
(arg_reg: arg_reg_relation n)
(regs_modified: (MS.reg_64 -> bool))
(xmms_modified: (MS.reg_xmm -> bool))
(c: BS.code)
(dom: list td)
(args: list arg {List.length args + List.length dom <= n})
(pre_rel: rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel: rel_gen_t c dom args (prediction_post_rel_t c))
(predict: prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel)
= | false | null | false | as_lowstar_sig_t_weak' n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel predict | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Vale.Interop.X64.arg_reg_relation",
"Vale.X64.Machine_s.reg_64",
"Prims.bool",
"Vale.X64.Machine_s.reg_xmm",
"Vale.X64.Machine_Semantics_s.code",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.Base.arg",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.X64.rel_gen_t",
"Vale.Interop.X64.prediction_pre_rel_t",
"Vale.Interop.X64.prediction_post_rel_t",
"Vale.Interop.X64.prediction_t",
"Vale.Interop.X64.as_lowstar_sig_t_weak'"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited,
with room left over for extra arguments and the extra slots needed.
Note that this number can be increased if needed *)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
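// Note on ordering (a clarifying sketch, assuming arguments are accumulated with ++ as in the
// wrappers below): the head of args is assigned register index n - 1 and the last element
// index 0, so the argument supplied first by the caller always ends up in of_arg 0.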
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1)
[@__reduce__]
let as_lowstar_sig_post_weak
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
(exists fuel
final_mem
s1.
h1 == hs_of_mem final_mem /\
rax == return_val s1 /\
post_rel h0 s0 (return_val s1, fuel, final_mem) s1))
[@__reduce__]
let as_lowstar_sig (c:BS.code) =
n:nat ->
arg_reg:arg_reg_relation n ->
regs_modified:(MS.reg_64 -> bool) ->
xmms_modified:(MS.reg_xmm -> bool) ->
args:arg_list ->
#pre_rel:_ ->
#post_rel:_ ->
predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ pre_rel h0))
(ensures fun h0 ret h1 -> as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 predict ret h1)
val wrap_variadic (c:BS.code) : as_lowstar_sig c
[@__reduce__]
let (++) (#t:td) (x:td_as_type t) (args:list arg) = (| t, x |) :: args
[@__reduce__]
let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20})
(f: arg_list -> Type) =
match td with
| [] -> f args
| hd::tl ->
x:td_as_type hd ->
rel_gen_t c tl (x++args) f
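// For instance (a sketch, writing t1 and t2 for two arbitrary domain elements),
// rel_gen_t c [t1; t2] args f unfolds to
// x1:td_as_type t1 -> x2:td_as_type t2 -> f (x2 ++ (x1 ++ args)),
// so the most recently supplied argument sits at the head of the accumulated list.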
[@__reduce__]
let elim_rel_gen_t_nil #c #args #f (x:rel_gen_t c [] args f)
: f args
= x
[@__reduce__]
let elim_rel_gen_t_cons #c hd tl #args #f (p:rel_gen_t c (hd::tl) args f)
: (x:td_as_type hd ->
rel_gen_t c tl (x++args) f)
= p
let rec prediction_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length dom + List.length args <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
=
match dom with
| [] ->
prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
| hd::tl ->
x:td_as_type hd ->
prediction_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
[@__reduce__]
let elim_predict_t_nil
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(#args:arg_list)
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c [] args pre_rel post_rel)
: prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
= p
[@__reduce__]
let elim_predict_t_cons
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(hd:td)
(tl:list td)
(#args:arg_list{List.length args + List.length tl <= 19})
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c (hd::tl) args pre_rel post_rel)
: x:td_as_type hd ->
prediction_t n arg_reg regs_modified xmms_modified c tl (x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
= p
[@__reduce__]
let rec as_lowstar_sig_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
match dom with
| [] ->
(unit ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 ->
mem_roots_p h0 args /\
elim_rel_gen_t_nil pre_rel h0))
(ensures fun h0 ret h1 ->
as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0
#pre_rel #post_rel (elim_predict_t_nil predict) ret h1))
| hd::tl ->
x:td_as_type hd ->
as_lowstar_sig_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x)
private
val wrap'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td{List.length dom <= 20})
(#pre_rel:rel_gen_t c dom [] (prediction_pre_rel_t c))
(#post_rel:rel_gen_t c dom [] (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel)
: as_lowstar_sig_t n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel predict
[@__reduce__]
private
let rec as_lowstar_sig_t_weak'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:list arg{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
match dom with
| [] ->
(unit ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 ->
mem_roots_p h0 args /\
elim_rel_gen_t_nil pre_rel h0))
(ensures fun h0 ret h1 ->
as_lowstar_sig_post_weak n arg_reg regs_modified xmms_modified c args h0
#pre_rel #post_rel (elim_predict_t_nil predict) ret h1))
| hd::tl ->
x:td_as_type hd ->
as_lowstar_sig_t_weak'
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x)
private
val wrap_weak'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td{List.length dom <= 20})
(#pre_rel:rel_gen_t c dom [] (prediction_pre_rel_t c))
(#post_rel:rel_gen_t c dom [] (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel)
: as_lowstar_sig_t_weak' n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel predict
(* These two functions are the ones that are available from outside the module. The arity_ok restriction ensures that all arguments are passed in registers for inline assembly *)
[@__reduce__]
let as_lowstar_sig_t_weak
(n:nat{n <= 20})
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:list arg{List.length args + List.length dom <= n})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c)) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_lowstar_sig_t_weak : n: Prims.nat{n <= 20} ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Prims.list Vale.Interop.Base.arg
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= n} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | [] | Vale.Interop.X64.as_lowstar_sig_t_weak | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
n: Prims.nat{n <= 20} ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Prims.list Vale.Interop.Base.arg
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= n} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | {
"end_col": 102,
"end_line": 595,
"start_col": 6,
"start_line": 595
} |
|
Prims.GTot | val arg_as_nat64 (a: arg) : GTot MS.nat64 | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x) | val arg_as_nat64 (a: arg) : GTot MS.nat64
let arg_as_nat64 (a: arg) : GTot MS.nat64 = | false | null | false | let (| tag , x |) = a in
match tag with
| TD_Base TUInt8 -> UInt8.v x
| TD_Base TUInt16 -> UInt16.v x
| TD_Base TUInt32 -> UInt32.v x
| TD_Base TUInt64 -> UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"sometrivial"
] | [
"Vale.Interop.Base.arg",
"Vale.Interop.Base.td",
"Vale.Interop.Base.td_as_type",
"FStar.UInt8.v",
"FStar.UInt16.v",
"FStar.UInt32.v",
"FStar.UInt64.v",
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.Interop.Base.buffer_qualifiers",
"Vale.Interop.Heap_s.global_addrs_map",
"Vale.Interop.Types.b8",
"Vale.Interop.Types.Buffer",
"LowStar.Buffer.buffer",
"Vale.Interop.Types.base_typ_as_type",
"Vale.Interop.Base.imm_to_b8",
"Vale.X64.Machine_s.nat64"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited,
with room left over for extra arguments and the extra slots needed.
Note that this number can be increased if needed *)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val arg_as_nat64 (a: arg) : GTot MS.nat64 | [] | Vale.Interop.X64.arg_as_nat64 | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | a: Vale.Interop.Base.arg -> Prims.GTot Vale.X64.Machine_s.nat64 | {
"end_col": 62,
"end_line": 89,
"start_col": 42,
"start_line": 75
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20})
(f: arg_list -> Type) =
match td with
| [] -> f args
| hd::tl ->
x:td_as_type hd ->
rel_gen_t c tl (x++args) f | let rec rel_gen_t
(c: BS.code)
(td: list td)
(args: arg_list{List.length args + List.length td <= 20})
(f: (arg_list -> Type))
= | false | null | false | match td with
| [] -> f args
| hd :: tl -> x: td_as_type hd -> rel_gen_t c tl (x ++ args) f | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Vale.X64.Machine_Semantics_s.code",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.X64.arg_list",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.arg",
"Vale.Interop.Base.td_as_type",
"Vale.Interop.X64.rel_gen_t",
"Vale.Interop.X64.op_Plus_Plus"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
//The calling convention w.r.t the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: Since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
Second, it bounds the stack space that must be reserved for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
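(* Illustrative sketch, not part of the original source: under the standard Windows x64
   convention, one instance of arg_reg_relation 4 maps
   of_arg 0 = rRcx, of_arg 1 = rRdx, of_arg 2 = rR8, of_arg 3 = rR9,
   with of_reg as the partial inverse that returns None on every other register;
   injectivity and the rRsp exclusion above then hold by construction. *)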
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
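(* Illustrative note, not part of the original source: with max_arity = 4 and
   args = [a4; a3; a2; a1] (the head is the most recently consed argument, see (++) below),
   a4 is written to of_arg 3, a3 to of_arg 2, a2 to of_arg 1 and a1 to of_arg 0;
   any argument whose position exceeds max_arity is skipped here and left to
   stack_of_args below. *)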
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
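(* Illustrative example, not part of the original source: on Windows (IA.win = true),
   assuming the standard max_arity = 4, the first stack-passed argument has n = 5 and
   is stored at ((5 - 4) - 1) * 8 + 32 + 8 + rsp = rsp + 40, i.e. just past the
   return address and the 32-byte shadow space; the next one (n = 6) lands at rsp + 48. *)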
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
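(* Illustrative note, not part of the original source: for args = [a1; a2] this unfolds to
   upd_taint_map_arg a1 (upd_taint_map_arg a2 init_taint); the tm parameter is not
   consulted by the fold, which always starts from init_taint. *)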
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1)
[@__reduce__]
let as_lowstar_sig_post_weak
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
(exists fuel
final_mem
s1.
h1 == hs_of_mem final_mem /\
rax == return_val s1 /\
post_rel h0 s0 (return_val s1, fuel, final_mem) s1))
[@__reduce__]
let as_lowstar_sig (c:BS.code) =
n:nat ->
arg_reg:arg_reg_relation n ->
regs_modified:(MS.reg_64 -> bool) ->
xmms_modified:(MS.reg_xmm -> bool) ->
args:arg_list ->
#pre_rel:_ ->
#post_rel:_ ->
predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ pre_rel h0))
(ensures fun h0 ret h1 -> as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 predict ret h1)
val wrap_variadic (c:BS.code) : as_lowstar_sig c
[@__reduce__]
let (++) (#t:td) (x:td_as_type t) (args:list arg) = (| t, x |) :: args
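(* Illustrative example, not part of the original source: if t is TD_Base TUInt64 and
   x : UInt64.t, then x ++ args is (| TD_Base TUInt64, x |) :: args, so the most
   recently added argument sits at the head of the list. *)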
[@__reduce__]
let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20}) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val rel_gen_t : c: Vale.X64.Machine_Semantics_s.code ->
td: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length td <= 20} ->
f: (_: Vale.Interop.X64.arg_list -> Type)
-> Type | [
"recursion"
] | Vale.Interop.X64.rel_gen_t | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
c: Vale.X64.Machine_Semantics_s.code ->
td: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length td <= 20} ->
f: (_: Vale.Interop.X64.arg_list -> Type)
-> Type | {
"end_col": 32,
"end_line": 409,
"start_col": 4,
"start_line": 405
} |
|
Prims.GTot | val register_of_args
(max_arity: nat)
(arg_reg: arg_reg_relation max_arity)
(n: nat)
(args: arg_list{List.Tot.length args = n})
(regs: registers)
: GTot (regs': registers{regs MS.rRsp == regs' MS.rRsp}) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs) | val register_of_args
(max_arity: nat)
(arg_reg: arg_reg_relation max_arity)
(n: nat)
(args: arg_list{List.Tot.length args = n})
(regs: registers)
: GTot (regs': registers{regs MS.rRsp == regs' MS.rRsp})
let rec register_of_args
(max_arity: nat)
(arg_reg: arg_reg_relation max_arity)
(n: nat)
(args: arg_list{List.Tot.length args = n})
(regs: registers)
: GTot (regs': registers{regs MS.rRsp == regs' MS.rRsp}) = | false | null | false | match args with
| [] -> regs
| hd :: tl ->
if n > max_arity
then register_of_args max_arity arg_reg (n - 1) tl regs
else update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"sometrivial"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation",
"Vale.Interop.X64.arg_list",
"Prims.b2t",
"Prims.op_Equality",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.arg",
"Vale.Interop.X64.registers",
"Prims.list",
"Prims.op_GreaterThan",
"Vale.Interop.X64.register_of_args",
"Prims.op_Subtraction",
"Prims.bool",
"Vale.Interop.X64.update_regs",
"Prims.eq2",
"Vale.X64.Machine_s.nat64",
"Vale.X64.Machine_s.rRsp"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
//The calling convention w.r.t the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: Since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
Second, it bounds the stack space that must be reserved for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store paramters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n}) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val register_of_args
(max_arity: nat)
(arg_reg: arg_reg_relation max_arity)
(n: nat)
(args: arg_list{List.Tot.length args = n})
(regs: registers)
: GTot (regs': registers{regs MS.rRsp == regs' MS.rRsp}) | [
"recursion"
] | Vale.Interop.X64.register_of_args | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
max_arity: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation max_arity ->
n: Prims.nat ->
args: Vale.Interop.X64.arg_list{FStar.List.Tot.Base.length args = n} ->
regs: Vale.Interop.X64.registers
-> Prims.GTot
(regs':
Vale.Interop.X64.registers{regs Vale.X64.Machine_s.rRsp == regs' Vale.X64.Machine_s.rRsp}) | {
"end_col": 101,
"end_line": 113,
"start_col": 4,
"start_line": 106
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec prediction_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length dom + List.length args <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
=
match dom with
| [] ->
prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
| hd::tl ->
x:td_as_type hd ->
prediction_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x) | let rec prediction_t
(n: nat)
(arg_reg: arg_reg_relation n)
(regs_modified: (MS.reg_64 -> bool))
(xmms_modified: (MS.reg_xmm -> bool))
(c: BS.code)
(dom: list td)
(args: arg_list{List.length dom + List.length args <= 20})
(pre_rel: rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel: rel_gen_t c dom args (prediction_post_rel_t c))
= | false | null | false | match dom with
| [] -> prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
| hd :: tl ->
x: td_as_type hd
-> prediction_t n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation",
"Vale.X64.Machine_s.reg_64",
"Prims.bool",
"Vale.X64.Machine_s.reg_xmm",
"Vale.X64.Machine_Semantics_s.code",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.X64.arg_list",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.arg",
"Vale.Interop.X64.rel_gen_t",
"Vale.Interop.X64.prediction_pre_rel_t",
"Vale.Interop.X64.prediction_post_rel_t",
"Vale.Interop.X64.prediction",
"Vale.Interop.Base.td_as_type",
"Vale.Interop.X64.prediction_t",
"Vale.Interop.X64.op_Plus_Plus",
"Vale.Interop.X64.elim_rel_gen_t_cons"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
//The calling convention w.r.t the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: Since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
Second, it bounds the stack space that must be reserved for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This arguments will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1)
[@__reduce__]
let as_lowstar_sig_post_weak
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
(exists fuel
final_mem
s1.
h1 == hs_of_mem final_mem /\
rax == return_val s1 /\
post_rel h0 s0 (return_val s1, fuel, final_mem) s1))
[@__reduce__]
let as_lowstar_sig (c:BS.code) =
n:nat ->
arg_reg:arg_reg_relation n ->
regs_modified:(MS.reg_64 -> bool) ->
xmms_modified:(MS.reg_xmm -> bool) ->
args:arg_list ->
#pre_rel:_ ->
#post_rel:_ ->
predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ pre_rel h0))
(ensures fun h0 ret h1 -> as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 predict ret h1)
val wrap_variadic (c:BS.code) : as_lowstar_sig c
[@__reduce__]
let (++) (#t:td) (x:td_as_type t) (args:list arg) = (| t, x |) :: args
[@__reduce__]
let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20})
(f: arg_list -> Type) =
match td with
| [] -> f args
| hd::tl ->
x:td_as_type hd ->
rel_gen_t c tl (x++args) f
[@__reduce__]
let elim_rel_gen_t_nil #c #args #f (x:rel_gen_t c [] args f)
: f args
= x
[@__reduce__]
let elim_rel_gen_t_cons #c hd tl #args #f (p:rel_gen_t c (hd::tl) args f)
: (x:td_as_type hd ->
rel_gen_t c tl (x++args) f)
= p
let rec prediction_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length dom + List.length args <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c)) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val prediction_t : n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length dom + FStar.List.Tot.Base.length args <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c)
-> Type | [
"recursion"
] | Vale.Interop.X64.prediction_t | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length dom + FStar.List.Tot.Base.length args <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c)
-> Type | {
"end_col": 48,
"end_line": 448,
"start_col": 4,
"start_line": 433
} |
|
FStar.Pervasives.Lemma | val mk_taint_equiv (args: arg_list_sb{disjoint_or_eq args}) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a)) | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl | val mk_taint_equiv (args: arg_list_sb{disjoint_or_eq args}) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
let rec mk_taint_equiv (args: arg_list_sb{disjoint_or_eq args}) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a)) = | false | null | true | match args with
| [] -> ()
| hd :: tl ->
mk_taint_equiv tl a;
let (| tag , x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _
| TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"lemma"
] | [
"Vale.Interop.X64.arg_list_sb",
"Vale.Interop.Base.disjoint_or_eq",
"Vale.Interop.Base.arg",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.Base.td_as_type",
"Vale.Interop.Base.valid_base_type",
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.Interop.Base.buffer_qualifiers",
"FStar.BigOps.big_and'_forall",
"Vale.Interop.Base.disjoint_or_eq_1",
"Prims.unit",
"Vale.Interop.Base.disjoint_or_eq_cons",
"Vale.Interop.X64.mk_taint_equiv",
"Prims.l_True",
"Prims.squash",
"Prims.l_imp",
"Prims.l_and",
"FStar.List.Tot.Base.memP",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Vale.Arch.HeapTypes_s.taint",
"Vale.Interop.X64.taint_of_arg",
"Prims.eq2",
"FStar.Pervasives.Native.__proj__Some__item__v",
"Vale.Interop.X64.mk_taint",
"Vale.Interop.X64.init_taint",
"Vale.Interop.X64.taint_arg_b8",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
//The calling convention w.r.t the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: Since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
Second, it bounds the stack space that must be reserved for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This arguments will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==> | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_taint_equiv (args: arg_list_sb{disjoint_or_eq args}) (a: arg)
: Lemma
(List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a)) | [
"recursion"
] | Vale.Interop.X64.mk_taint_equiv | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
args: Vale.Interop.X64.arg_list_sb{Vale.Interop.Base.disjoint_or_eq args} ->
a: Vale.Interop.Base.arg
-> FStar.Pervasives.Lemma
(ensures
FStar.List.Tot.Base.memP a args /\ Some? (Vale.Interop.X64.taint_of_arg a) ==>
Some?.v (Vale.Interop.X64.taint_of_arg a) ==
Vale.Interop.X64.mk_taint args Vale.Interop.X64.init_taint (Vale.Interop.X64.taint_arg_b8 a)) | {
"end_col": 56,
"end_line": 200,
"start_col": 5,
"start_line": 191
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec as_lowstar_sig_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
match dom with
| [] ->
(unit ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 ->
mem_roots_p h0 args /\
elim_rel_gen_t_nil pre_rel h0))
(ensures fun h0 ret h1 ->
as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0
#pre_rel #post_rel (elim_predict_t_nil predict) ret h1))
| hd::tl ->
x:td_as_type hd ->
as_lowstar_sig_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x) | let rec as_lowstar_sig_t
(n: nat)
(arg_reg: arg_reg_relation n)
(regs_modified: (MS.reg_64 -> bool))
(xmms_modified: (MS.reg_xmm -> bool))
(c: BS.code)
(dom: list td)
(args: arg_list{List.length args + List.length dom <= 20})
(pre_rel: rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel: rel_gen_t c dom args (prediction_post_rel_t c))
(predict: prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel)
= | false | null | false | match dom with
| [] ->
(unit
-> FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ elim_rel_gen_t_nil pre_rel h0))
(ensures
fun h0 ret h1 ->
as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 #pre_rel #post_rel
(elim_predict_t_nil predict) ret h1))
| hd :: tl ->
x: td_as_type hd
-> as_lowstar_sig_t n arg_reg regs_modified xmms_modified c tl (x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x) (elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation",
"Vale.X64.Machine_s.reg_64",
"Prims.bool",
"Vale.X64.Machine_s.reg_xmm",
"Vale.X64.Machine_Semantics_s.code",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.X64.arg_list",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.Base.arg",
"Vale.Interop.X64.rel_gen_t",
"Vale.Interop.X64.prediction_pre_rel_t",
"Vale.Interop.X64.prediction_post_rel_t",
"Vale.Interop.X64.prediction_t",
"Prims.unit",
"Vale.Interop.X64.als_ret",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Vale.Interop.Base.mem_roots_p",
"Vale.Interop.X64.elim_rel_gen_t_nil",
"Vale.Interop.X64.as_lowstar_sig_post",
"Vale.Interop.X64.elim_predict_t_nil",
"Vale.Interop.Base.td_as_type",
"Vale.Interop.X64.as_lowstar_sig_t",
"Vale.Interop.X64.op_Plus_Plus",
"Vale.Interop.X64.elim_rel_gen_t_cons",
"Vale.Interop.X64.elim_predict_t_cons"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
It also bounds the stack space that has to be reserved
for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
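// Illustrative sketch (added; not part of the original interface): a minimal instantiation of
// this relation for n = 1 could map the single argument slot to rRdi. The names of_arg_1 and
// of_reg_1 are invented for this sketch only:
//   let of_arg_1 (i:reg_nat 1) : MS.reg_64 = MS.rRdi
//   let of_reg_1 (r:MS.reg_64) : option (reg_nat 1) = if r = MS.rRdi then Some 0 else None
// Rel of_reg_1 of_arg_1 satisfies the refinements above: of_arg_1 is injective on a singleton
// domain, never returns rRsp, and of_reg_1 is its partial inverse.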
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
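// Reading note (added): the head of args is bound to register of_arg (n - 1), so the most
// recently consed argument gets the highest-numbered slot, while the last element of the list
// ends up in of_arg 0.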
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
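// Worked example (added, purely illustrative): with max_arity = 4, IA.win = false, rsp = 0 and a
// six-argument call, only the 5th and 6th arguments hit the stack:
//   n = 5: ptr = ((5 - 4) - 1) * 8 + 0 + 8 + 0 = 8
//   n = 6: ptr = ((6 - 4) - 1) * 8 + 0 + 8 + 0 = 16
// i.e. they sit just above the 8-byte return-address slot, rightmost argument furthest from rsp.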
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
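// Reading note (added): fold_right_gtot applies upd_taint_map_arg to the head of args last, so
// if two buffer arguments alias the same b8, the taint recorded for the head of the list wins.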
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
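// Illustration (added): continuing the six-argument, non-Windows example above, the stack map of
// s0 holds exactly the bytes of the 5th and 6th arguments at init_rsp + 8 and init_rsp + 16,
// while the first four arguments are already placed in registers by register_of_args.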
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1)
[@__reduce__]
let as_lowstar_sig_post_weak
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
(exists fuel
final_mem
s1.
h1 == hs_of_mem final_mem /\
rax == return_val s1 /\
post_rel h0 s0 (return_val s1, fuel, final_mem) s1))
[@__reduce__]
let as_lowstar_sig (c:BS.code) =
n:nat ->
arg_reg:arg_reg_relation n ->
regs_modified:(MS.reg_64 -> bool) ->
xmms_modified:(MS.reg_xmm -> bool) ->
args:arg_list ->
#pre_rel:_ ->
#post_rel:_ ->
predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ pre_rel h0))
(ensures fun h0 ret h1 -> as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 predict ret h1)
val wrap_variadic (c:BS.code) : as_lowstar_sig c
[@__reduce__]
let (++) (#t:td) (x:td_as_type t) (args:list arg) = (| t, x |) :: args
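// Usage sketch (added, hypothetical values): x2 ++ (x1 ++ []) evaluates to
// [(| t2, x2 |); (| t1, x1 |)], which is why the argument supplied last sits at the head of args
// in the wrappers below.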
[@__reduce__]
let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20})
(f: arg_list -> Type) =
match td with
| [] -> f args
| hd::tl ->
x:td_as_type hd ->
rel_gen_t c tl (x++args) f
[@__reduce__]
let elim_rel_gen_t_nil #c #args #f (x:rel_gen_t c [] args f)
: f args
= x
[@__reduce__]
let elim_rel_gen_t_cons #c hd tl #args #f (p:rel_gen_t c (hd::tl) args f)
: (x:td_as_type hd ->
rel_gen_t c tl (x++args) f)
= p
let rec prediction_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length dom + List.length args <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
=
match dom with
| [] ->
prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
| hd::tl ->
x:td_as_type hd ->
prediction_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
[@__reduce__]
let elim_predict_t_nil
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(#args:arg_list)
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c [] args pre_rel post_rel)
: prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
= p
[@__reduce__]
let elim_predict_t_cons
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(hd:td)
(tl:list td)
(#args:arg_list{List.length args + List.length tl <= 19})
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c (hd::tl) args pre_rel post_rel)
: x:td_as_type hd ->
prediction_t n arg_reg regs_modified xmms_modified c tl (x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
= p
[@__reduce__]
let rec as_lowstar_sig_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c)) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_lowstar_sig_t : n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | [
"recursion"
] | Vale.Interop.X64.as_lowstar_sig_t | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Vale.Interop.X64.arg_list
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | {
"end_col": 47,
"end_line": 517,
"start_col": 6,
"start_line": 495
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r}) | let arg_reg_relation (n: nat) = | false | null | false | (v:
arg_reg_relation' n
{ forall (r: MS.reg_64). {:pattern v.of_reg r}
Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r }) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation'",
"Prims.l_Forall",
"Vale.X64.Machine_s.reg_64",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"Vale.Interop.X64.reg_nat",
"Vale.Interop.X64.__proj__Rel__item__of_reg",
"Prims.op_Equality",
"Vale.Interop.X64.__proj__Rel__item__of_arg",
"FStar.Pervasives.Native.__proj__Some__item__v"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
It also bounds the stack space that has to be reserved
for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n | false | true | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val arg_reg_relation : n: Prims.nat -> Type0 | [] | Vale.Interop.X64.arg_reg_relation | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | n: Prims.nat -> Type0 | {
"end_col": 105,
"end_line": 62,
"start_col": 31,
"start_line": 60
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec as_lowstar_sig_t_weak'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:list arg{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
match dom with
| [] ->
(unit ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 ->
mem_roots_p h0 args /\
elim_rel_gen_t_nil pre_rel h0))
(ensures fun h0 ret h1 ->
as_lowstar_sig_post_weak n arg_reg regs_modified xmms_modified c args h0
#pre_rel #post_rel (elim_predict_t_nil predict) ret h1))
| hd::tl ->
x:td_as_type hd ->
as_lowstar_sig_t_weak'
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x) | let rec as_lowstar_sig_t_weak'
(n: nat)
(arg_reg: arg_reg_relation n)
(regs_modified: (MS.reg_64 -> bool))
(xmms_modified: (MS.reg_xmm -> bool))
(c: BS.code)
(dom: list td)
(args: list arg {List.length args + List.length dom <= 20})
(pre_rel: rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel: rel_gen_t c dom args (prediction_post_rel_t c))
(predict: prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel)
= | false | null | false | match dom with
| [] ->
(unit
-> FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ elim_rel_gen_t_nil pre_rel h0))
(ensures
fun h0 ret h1 ->
as_lowstar_sig_post_weak n arg_reg regs_modified xmms_modified c args h0 #pre_rel
#post_rel (elim_predict_t_nil predict) ret h1))
| hd :: tl ->
x: td_as_type hd
-> as_lowstar_sig_t_weak' n arg_reg regs_modified xmms_modified c tl (x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x) (elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation",
"Vale.X64.Machine_s.reg_64",
"Prims.bool",
"Vale.X64.Machine_s.reg_xmm",
"Vale.X64.Machine_Semantics_s.code",
"Prims.list",
"Vale.Interop.Base.td",
"Vale.Interop.Base.arg",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.List.Tot.Base.length",
"Vale.Interop.X64.rel_gen_t",
"Vale.Interop.X64.prediction_pre_rel_t",
"Vale.Interop.X64.prediction_post_rel_t",
"Vale.Interop.X64.prediction_t",
"Prims.unit",
"Vale.Interop.X64.als_ret",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Vale.Interop.Base.mem_roots_p",
"Vale.Interop.X64.elim_rel_gen_t_nil",
"Vale.Interop.X64.as_lowstar_sig_post_weak",
"Vale.Interop.X64.elim_predict_t_nil",
"Vale.Interop.Base.td_as_type",
"Vale.Interop.X64.as_lowstar_sig_t_weak'",
"Vale.Interop.X64.op_Plus_Plus",
"Vale.Interop.X64.elim_rel_gen_t_cons",
"Vale.Interop.X64.elim_predict_t_cons"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
It also bounds the stack space that has to be reserved
for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1)
[@__reduce__]
let as_lowstar_sig_post_weak
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
(exists fuel
final_mem
s1.
h1 == hs_of_mem final_mem /\
rax == return_val s1 /\
post_rel h0 s0 (return_val s1, fuel, final_mem) s1))
[@__reduce__]
let as_lowstar_sig (c:BS.code) =
n:nat ->
arg_reg:arg_reg_relation n ->
regs_modified:(MS.reg_64 -> bool) ->
xmms_modified:(MS.reg_xmm -> bool) ->
args:arg_list ->
#pre_rel:_ ->
#post_rel:_ ->
predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 -> mem_roots_p h0 args /\ pre_rel h0))
(ensures fun h0 ret h1 -> as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0 predict ret h1)
val wrap_variadic (c:BS.code) : as_lowstar_sig c
[@__reduce__]
let (++) (#t:td) (x:td_as_type t) (args:list arg) = (| t, x |) :: args
[@__reduce__]
let rec rel_gen_t
(c:BS.code)
(td:list td)
(args:arg_list{List.length args + List.length td <= 20})
(f: arg_list -> Type) =
match td with
| [] -> f args
| hd::tl ->
x:td_as_type hd ->
rel_gen_t c tl (x++args) f
[@__reduce__]
let elim_rel_gen_t_nil #c #args #f (x:rel_gen_t c [] args f)
: f args
= x
[@__reduce__]
let elim_rel_gen_t_cons #c hd tl #args #f (p:rel_gen_t c (hd::tl) args f)
: (x:td_as_type hd ->
rel_gen_t c tl (x++args) f)
= p
let rec prediction_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length dom + List.length args <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
=
match dom with
| [] ->
prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
| hd::tl ->
x:td_as_type hd ->
prediction_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
[@__reduce__]
let elim_predict_t_nil
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(#args:arg_list)
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c [] args pre_rel post_rel)
: prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel
= p
[@__reduce__]
let elim_predict_t_cons
(#n:nat)
(#arg_reg:arg_reg_relation n)
(#regs_modified:MS.reg_64 -> bool)
(#xmms_modified:MS.reg_xmm -> bool)
(#c:BS.code)
(hd:td)
(tl:list td)
(#args:arg_list{List.length args + List.length tl <= 19})
(#pre_rel:_)
(#post_rel:_)
(p:prediction_t n arg_reg regs_modified xmms_modified c (hd::tl) args pre_rel post_rel)
: x:td_as_type hd ->
prediction_t n arg_reg regs_modified xmms_modified c tl (x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
= p
[@__reduce__]
let rec as_lowstar_sig_t
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:arg_list{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom args pre_rel post_rel) =
match dom with
| [] ->
(unit ->
FStar.HyperStack.ST.Stack als_ret
(requires (fun h0 ->
mem_roots_p h0 args /\
elim_rel_gen_t_nil pre_rel h0))
(ensures fun h0 ret h1 ->
as_lowstar_sig_post n arg_reg regs_modified xmms_modified c args h0
#pre_rel #post_rel (elim_predict_t_nil predict) ret h1))
| hd::tl ->
x:td_as_type hd ->
as_lowstar_sig_t
n
arg_reg
regs_modified
xmms_modified
c
tl
(x ++ args)
(elim_rel_gen_t_cons hd tl pre_rel x)
(elim_rel_gen_t_cons hd tl post_rel x)
(elim_predict_t_cons hd tl predict x)
private
val wrap'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td{List.length dom <= 20})
(#pre_rel:rel_gen_t c dom [] (prediction_pre_rel_t c))
(#post_rel:rel_gen_t c dom [] (prediction_post_rel_t c))
(predict:prediction_t n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel)
: as_lowstar_sig_t n arg_reg regs_modified xmms_modified c dom [] pre_rel post_rel predict
[@__reduce__]
private
let rec as_lowstar_sig_t_weak'
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(dom:list td)
(args:list arg{List.length args + List.length dom <= 20})
(pre_rel:rel_gen_t c dom args (prediction_pre_rel_t c))
(post_rel:rel_gen_t c dom args (prediction_post_rel_t c)) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_lowstar_sig_t_weak' : n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Prims.list Vale.Interop.Base.arg
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | [
"recursion"
] | Vale.Interop.X64.as_lowstar_sig_t_weak' | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
dom: Prims.list Vale.Interop.Base.td ->
args:
Prims.list Vale.Interop.Base.arg
{FStar.List.Tot.Base.length args + FStar.List.Tot.Base.length dom <= 20} ->
pre_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_pre_rel_t c) ->
post_rel: Vale.Interop.X64.rel_gen_t c dom args (Vale.Interop.X64.prediction_post_rel_t c) ->
predict:
Vale.Interop.X64.prediction_t n
arg_reg
regs_modified
xmms_modified
c
dom
args
pre_rel
post_rel
-> Type0 | {
"end_col": 47,
"end_line": 567,
"start_col": 6,
"start_line": 545
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.List.Tot",
"short_module": "List"
},
{
"abbrev": true,
"full_module": "Vale.Interop.Assumptions",
"short_module": "IA"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_s",
"short_module": "MS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Down",
"short_module": "DV"
},
{
"abbrev": true,
"full_module": "LowStar.BufferView.Up",
"short_module": "UV"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Interop",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret:als_ret)
(h1:HS.mem) =
(* write it this way to be reduction friendly *)
let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\
n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\
prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1) | let as_lowstar_sig_post
(n: nat)
(arg_reg: arg_reg_relation n)
(regs_modified: (MS.reg_64 -> bool))
(xmms_modified: (MS.reg_xmm -> bool))
(c: BS.code)
(args: arg_list)
(h0: mem_roots args)
(#pre_rel #post_rel: _)
(predict: prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel)
(ret: als_ret)
(h1: HS.mem)
= | false | null | false | let rax = fst ret in
let ret = Ghost.reveal (snd ret) in
args == As_lowstar_sig_ret?.args ret /\ n == As_lowstar_sig_ret?.n ret /\
(let fuel = As_lowstar_sig_ret?.fuel ret in
let final_mem = As_lowstar_sig_ret?.final_mem ret in
let s0 = fst (create_initial_trusted_state n arg_reg args h0) in
h1 == hs_of_mem final_mem /\ prediction_pre n arg_reg c args pre_rel h0 s0 /\
(rax, fuel, final_mem) == predict h0 s0 /\
prediction_post n regs_modified xmms_modified c args post_rel h0 s0 (rax, fuel, final_mem) /\
FStar.HyperStack.ST.equal_domains h0 h1) | {
"checked_file": "Vale.Interop.X64.fsti.checked",
"dependencies": [
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.Interop.Base.fst.checked",
"Vale.Interop.Assumptions.fst.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"LowStar.BufferView.Up.fsti.checked",
"LowStar.BufferView.Down.fsti.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt16.fsti.checked",
"FStar.StrongExcludedMiddle.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.BigOps.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.Interop.X64.fsti"
} | [
"total"
] | [
"Prims.nat",
"Vale.Interop.X64.arg_reg_relation",
"Vale.X64.Machine_s.reg_64",
"Prims.bool",
"Vale.X64.Machine_s.reg_xmm",
"Vale.X64.Machine_Semantics_s.code",
"Vale.Interop.X64.arg_list",
"Vale.Interop.Base.mem_roots",
"Vale.Interop.X64.prediction_pre_rel_t",
"Vale.Interop.X64.prediction_post_rel_t",
"Vale.Interop.X64.prediction",
"Vale.Interop.X64.als_ret",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"Prims.eq2",
"Vale.Interop.X64.__proj__As_lowstar_sig_ret__item__args",
"Vale.Interop.X64.__proj__As_lowstar_sig_ret__item__n",
"Vale.Interop.Heap_s.hs_of_mem",
"Vale.Interop.X64.prediction_pre",
"FStar.Pervasives.Native.tuple3",
"FStar.UInt64.t",
"Vale.Interop.Heap_s.interop_heap",
"FStar.Pervasives.Native.Mktuple3",
"Vale.Interop.X64.prediction_post",
"FStar.HyperStack.ST.equal_domains",
"Vale.X64.Machine_Semantics_s.machine_state",
"FStar.Pervasives.Native.fst",
"Vale.Interop.X64.create_initial_trusted_state",
"Vale.Interop.X64.__proj__As_lowstar_sig_ret__item__final_mem",
"Vale.Interop.X64.__proj__As_lowstar_sig_ret__item__fuel",
"Vale.Interop.X64.as_lowstar_sig_ret",
"FStar.Ghost.reveal",
"FStar.Pervasives.Native.snd",
"FStar.Ghost.erased",
"Prims.logical"
] | [] | module Vale.Interop.X64
open FStar.Mul
open Vale.Interop.Base
open Vale.Arch.HeapTypes_s
open Vale.Arch.Heap
module B = LowStar.Buffer
module BS = Vale.X64.Machine_Semantics_s
module UV = LowStar.BufferView.Up
module DV = LowStar.BufferView.Down
module HS = FStar.HyperStack
module MS = Vale.X64.Machine_s
module IA = Vale.Interop.Assumptions
module List = FStar.List.Tot
////////////////////////////////////////////////////////////////////////////////
// The calling convention w.r.t. the register mapping
////////////////////////////////////////////////////////////////////////////////
let calling_conventions
(s0 s1:BS.machine_state)
(regs_modified: MS.reg_64 -> bool)
(xmms_modified: MS.reg_xmm -> bool) =
let s0 = s0 in
let s1 = s1 in
s1.BS.ms_ok /\
s0.BS.ms_regs MS.reg_Rsp == s1.BS.ms_regs MS.reg_Rsp /\
(forall (r:MS.reg). {:pattern (s0.BS.ms_regs r)}
match r with
| MS.Reg 0 r -> not (regs_modified r) ==> s0.BS.ms_regs (MS.Reg 0 r) == s1.BS.ms_regs (MS.Reg 0 r)
| MS.Reg 1 r -> not (xmms_modified r) ==> s0.BS.ms_regs (MS.Reg 1 r) == s1.BS.ms_regs (MS.Reg 1 r)
)
let reg_nat (n:nat) = i:nat{i < n}
let arity_ok n 'a = l:list 'a { List.Tot.length l <= n }
(* We limit the number of args we can pass through the interop wrappers to an arbitrary 20.
This ensures first that the addr_map axiom is sound: Since the length of buffers is limited to 2^32, we can prove that addr_map is inhabited.
Secondly, it ensures there is enough room on the stack for extra arguments + the extra slots needed.
Note that this number can be increased if needed*)
let arg_list = l:list arg{List.Tot.length l <= 20}
let arg_list_sb = l:list arg{List.Tot.length l <= 21}
unfold
let injective f = forall x y.{:pattern f x; f y} f x == f y ==> x == y
noeq
type arg_reg_relation' (n:nat) =
| Rel: of_reg:(MS.reg_64 -> option (reg_nat n)) ->
of_arg:(reg_nat n -> MS.reg_64){
// This function should be injective
injective of_arg /\
// rRsp is not a valid register to store parameters
(forall (i:reg_nat n).{:pattern of_arg i} of_arg i <> MS.rRsp) /\
// of_reg should always return Some when the register corresponds to an of_arg
(forall (i:reg_nat n).{:pattern of_arg i}
Some? (of_reg (of_arg i)) /\ Some?.v (of_reg (of_arg i)) = i)} ->
arg_reg_relation' n
unfold
let arg_reg_relation (n:nat) = (v:arg_reg_relation' n{
// of_reg is a partial inverse of of_arg
forall (r:MS.reg_64).{:pattern v.of_reg r} Some? (v.of_reg r) ==> v.of_arg (Some?.v (v.of_reg r)) = r})
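(* Editor's note (illustration, not part of the original file): a hypothetical
   instance of this relation for n = 2, in the style of the System V convention,
   could take of_arg 0 = MS.rRdi and of_arg 1 = MS.rRsi, with of_reg returning
   Some 0 on rRdi, Some 1 on rRsi and None on every other register; injectivity,
   the rRsp exclusion and the partial-inverse condition all hold for such a mapping. *)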
let registers = MS.reg_64 -> MS.nat64
let upd_reg (n:nat) (arg_reg:arg_reg_relation n) (regs:registers) (i:nat) (v:_) : registers =
fun (r:MS.reg_64) ->
match arg_reg.of_reg r with
| Some j ->
if i = j then v
else regs r
| _ -> regs r
[@__reduce__]
let arg_as_nat64 (a:arg) : GTot MS.nat64 =
let (| tag, x |) = a in
match tag with
| TD_Base TUInt8 ->
UInt8.v x
| TD_Base TUInt16 ->
UInt16.v x
| TD_Base TUInt32 ->
UInt32.v x
| TD_Base TUInt64 ->
UInt64.v x
| TD_Buffer src _ _ ->
let b:b8 = Buffer true (x <: B.buffer (base_typ_as_type src)) in
global_addrs_map b
| TD_ImmBuffer src _ _ -> global_addrs_map (imm_to_b8 src x)
[@__reduce__]
let update_regs (n:nat)
(arg_reg:arg_reg_relation n)
(x:arg)
(i:reg_nat n)
(regs:registers)
: GTot registers
= upd_reg n arg_reg regs i (arg_as_nat64 x)
[@__reduce__]
let rec register_of_args (max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(n:nat)
(args:arg_list{List.Tot.length args = n})
(regs:registers) : GTot (regs':registers{regs MS.rRsp == regs' MS.rRsp}) =
match args with
| [] -> regs
| hd::tl ->
if n > max_arity then
// This argument will be passed on the stack
register_of_args max_arity arg_reg (n-1) tl regs
else
update_regs max_arity arg_reg hd (n - 1) (register_of_args max_arity arg_reg (n - 1) tl regs)
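(* Editor's note (worked example with hypothetical arguments, not part of the
   original file): the head of args is the highest-numbered argument, since hd is
   mapped to register index n - 1. With max_arity = 2 and args = [a3; a2; a1],
   a3 (n = 3) is skipped here and left for the stack, while a2 and a1 end up in
   of_arg 1 and of_arg 0 respectively. *)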
// Pass extra arguments on the stack. The arity_ok condition on inline wrappers ensures that
// this only happens for stdcalls
[@__reduce__]
let rec stack_of_args (max_arity:nat)
(n:nat)
(rsp:int)
(args:arg_list{List.Tot.length args = n})
(st:Map.t int Vale.Def.Words_s.nat8)
: GTot (Map.t int Vale.Def.Words_s.nat8) =
match args with
| [] -> st
| hd::tl ->
if n <= max_arity then st // We can pass the remaining args in registers
else
let ptr = ((n - max_arity) - 1) * 8 // Arguments on the stack are pushed from right to left
+ (if IA.win then 32 else 0) // The shadow space on Windows comes next
+ 8 // The return address is then pushed on the stack
+ rsp // And we then have all the extra slots required for the Vale procedure
in
let st1 = stack_of_args max_arity (n-1) rsp tl st in
let v = arg_as_nat64 hd in // We will store the arg hd
BS.update_heap64 ptr v st1
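(* Editor's note (worked example with hypothetical numbers, not part of the
   original file): on Windows, with max_arity = 4 and rsp = 0, the 5th argument
   (n = 5) is written at ptr = (5 - 4 - 1) * 8 + 32 + 8 + 0 = 40, i.e. just above
   the 8-byte return address and the 32-byte shadow space; the 6th argument lands
   8 bytes higher, at 48. *)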
////////////////////////////////////////////////////////////////////////////////
let taint_map = b8 -> GTot taint
let upd_taint_map_b8 (tm:taint_map) (x:b8) (tnt:taint) : taint_map =
fun (y:b8) ->
if StrongExcludedMiddle.strong_excluded_middle ((x <: b8) == y) then
tnt
else tm y
[@__reduce__]
let upd_taint_map_arg (a:arg) (tm:taint_map) : GTot taint_map =
match a with
| (| TD_Buffer _ _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (Buffer true x) tnt
| (| TD_ImmBuffer src _ {taint=tnt}, x |) ->
upd_taint_map_b8 tm (imm_to_b8 src x) tnt
| (| TD_Base _, _ |) ->
tm
let init_taint : taint_map = fun r -> Public
[@__reduce__]
let mk_taint (args:arg_list_sb) (tm:taint_map) : GTot taint_map =
List.fold_right_gtot args upd_taint_map_arg init_taint
let taint_of_arg (a:arg) =
let (| tag, x |) = a in
match tag with
| TD_ImmBuffer _ TUInt64 {taint=tnt}
| TD_ImmBuffer _ TUInt128 {taint=tnt}
| TD_Buffer _ TUInt64 {taint=tnt}
| TD_Buffer _ TUInt128 {taint=tnt} -> Some tnt
| _ -> None
let taint_arg_b8 (a:arg{Some? (taint_of_arg a)}) : GTot b8 =
let (| tag, x |) = a in
match tag with
| TD_Buffer src _ _ -> Buffer true (x <: B.buffer (base_typ_as_type src))
| TD_ImmBuffer src _ _ -> imm_to_b8 src x
let rec taint_arg_args_b8_mem (args:arg_list) (a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
List.memP (taint_arg_b8 a) (args_b8 args))
= match args with
| [] -> ()
| hd::tl ->
taint_arg_args_b8_mem tl a
let rec mk_taint_equiv
(args:arg_list_sb{disjoint_or_eq args})
(a:arg)
: Lemma (List.memP a args /\ Some? (taint_of_arg a) ==>
Some?.v (taint_of_arg a) == (mk_taint args init_taint) (taint_arg_b8 a))
= match args with
| [] -> ()
| hd::tl ->
mk_taint_equiv tl a;
let (| tag, x |) = hd in
match tag with
| TD_Base _ -> ()
| TD_Buffer _ _ _ | TD_ImmBuffer _ _ _ ->
disjoint_or_eq_cons hd tl;
BigOps.big_and'_forall (disjoint_or_eq_1 hd) tl
////////////////////////////////////////////////////////////////////////////////
let state_builder_t (max_arity:nat) (args:arg_list) (codom:Type) =
h0:HS.mem{mem_roots_p h0 args} ->
GTot codom
// Splitting the construction of the initial state into two functions
// one that creates the initial trusted state (i.e., part of our TCB)
// and another that just creates the vale state, a view upon the trusted one
let create_initial_trusted_state
(max_arity:nat)
(arg_reg:arg_reg_relation max_arity)
(args:arg_list)
: state_builder_t max_arity args (BS.machine_state & interop_heap) =
fun h0 ->
let open MS in
let regs_64 = register_of_args max_arity arg_reg (List.Tot.length args) args IA.init_regs in
let xmms = IA.init_xmms in
let flags = FunctionalExtensionality.on flag IA.init_flags in
let init_rsp = regs_64 rRsp in
let regs = FunctionalExtensionality.on_dom reg #t_reg (fun r ->
match r with
| Reg 0 r -> regs_64 r
| Reg 1 r -> xmms r)
in
// Create an initial empty stack
let stack = Map.const_on Set.empty 0 in
// Spill additional arguments on the stack
let stack = stack_of_args max_arity (List.Tot.length args) init_rsp args stack in
let mem:interop_heap = mk_mem args h0 in
let memTaint = create_memtaint mem (args_b8 args) (mk_taint args init_taint) in
let (s0:BS.machine_state) = {
BS.ms_ok = true;
BS.ms_regs = regs;
BS.ms_flags = flags;
BS.ms_heap = heap_create_impl mem memTaint;
BS.ms_stack = BS.Machine_stack init_rsp stack;
BS.ms_stackTaint = Map.const Public;
BS.ms_trace = [];
} in
(s0, mem)
////////////////////////////////////////////////////////////////////////////////
let prediction_pre_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
prop
let return_val_t (sn:BS.machine_state) = r:UInt64.t{UInt64.v r == BS.eval_reg_64 MS.rRax sn}
let return_val (sn:BS.machine_state) : return_val_t sn =
UInt64.uint_to_t (BS.eval_reg_64 MS.rRax sn)
let prediction_post_rel_t (c:BS.code) (args:arg_list) =
h0:mem_roots args ->
s0:BS.machine_state ->
(UInt64.t & nat & interop_heap) ->
sn:BS.machine_state ->
prop
[@__reduce__]
let prediction_pre
(n:nat)
(arg_reg:arg_reg_relation n)
(c:BS.code)
(args:arg_list)
(pre_rel: prediction_pre_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
=
pre_rel h0 /\
s0 == fst (create_initial_trusted_state n arg_reg args h0)
[@__reduce__]
let prediction_post
(n:nat)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(post_rel: prediction_post_rel_t c args)
(h0:mem_roots args)
(s0:BS.machine_state)
(rax_fuel_mem:(UInt64.t & nat & interop_heap)) =
let (rax, fuel, final_mem) = rax_fuel_mem in
Some? (BS.machine_eval_code c fuel s0) /\ (
let s1 = Some?.v (BS.machine_eval_code c fuel s0) in
let h1 = hs_of_mem final_mem in
FStar.HyperStack.ST.equal_domains h0 h1 /\
B.modifies (loc_modified_args args) h0 h1 /\
mem_roots_p h1 args /\
heap_create_machine (mk_mem args h1) == heap_get s1.BS.ms_heap /\
calling_conventions s0 s1 regs_modified xmms_modified /\
rax == return_val s1 /\
post_rel h0 s0 rax_fuel_mem s1
)
let prediction
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(pre_rel:prediction_pre_rel_t c args)
(post_rel:prediction_post_rel_t c args) =
h0:mem_roots args{pre_rel h0} ->
s0:BS.machine_state ->
Ghost (UInt64.t & nat & interop_heap)
(requires prediction_pre n arg_reg c args pre_rel h0 s0)
(ensures prediction_post n regs_modified xmms_modified c args post_rel h0 s0)
noeq
type as_lowstar_sig_ret =
| As_lowstar_sig_ret :
n:nat ->
args:arg_list ->
fuel:nat ->
final_mem:interop_heap ->
as_lowstar_sig_ret
let als_ret = UInt64.t & Ghost.erased as_lowstar_sig_ret
[@__reduce__]
let as_lowstar_sig_post
(n:nat)
(arg_reg:arg_reg_relation n)
(regs_modified:MS.reg_64 -> bool)
(xmms_modified:MS.reg_xmm -> bool)
(c:BS.code)
(args:arg_list)
(h0:mem_roots args)
(#pre_rel:_)
(#post_rel: _)
(predict:prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel) | false | false | Vale.Interop.X64.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_lowstar_sig_post : n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
args: Vale.Interop.X64.arg_list ->
h0: Vale.Interop.Base.mem_roots args ->
predict:
Vale.Interop.X64.prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
ret: Vale.Interop.X64.als_ret ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | [] | Vale.Interop.X64.as_lowstar_sig_post | {
"file_name": "vale/specs/interop/Vale.Interop.X64.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
n: Prims.nat ->
arg_reg: Vale.Interop.X64.arg_reg_relation n ->
regs_modified: (_: Vale.X64.Machine_s.reg_64 -> Prims.bool) ->
xmms_modified: (_: Vale.X64.Machine_s.reg_xmm -> Prims.bool) ->
c: Vale.X64.Machine_Semantics_s.code ->
args: Vale.Interop.X64.arg_list ->
h0: Vale.Interop.Base.mem_roots args ->
predict:
Vale.Interop.X64.prediction n arg_reg regs_modified xmms_modified c args pre_rel post_rel ->
ret: Vale.Interop.X64.als_ret ->
h1: FStar.Monotonic.HyperStack.mem
-> Prims.logical | {
"end_col": 42,
"end_line": 349,
"start_col": 17,
"start_line": 336
} |
|
Prims.GTot | val ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length | val ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 = | false | null | false | larger.b_is_mm == smaller.b_is_mm /\ larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.ubuffer_",
"Prims.l_and",
"Prims.eq2",
"Prims.bool",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_is_mm",
"Prims.nat",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_max_length",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_offset",
"Prims.op_Addition",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_length"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
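(* Editor's note (illustration with hypothetical indices, not part of the original
   file): for len = 10, an inner window [i1, j1) = [2, 8) and, relative to it,
   [i2, j2) = [1, 4), the lemma composes the two compatibility facts into one for
   the absolute window [i1 + i2, i1 + j2) = [3, 6). *)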
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
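(* Editor's note (illustration with hypothetical indices, not part of the original
   file): gsub_gsub captures that nested sub-buffers compose additively, e.g. taking
   a sub-buffer at offset 2ul and then a sub-buffer of that at offset 3ul covers the
   same elements as a single sub-buffer taken at offset 5ul, provided the preorders
   are compatible as required above. *)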
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
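(* Editor's note (worked example with hypothetical numbers, not part of the original
   file): for a non-null buffer with idx = 2 and length = 10, taking from = 3ul and
   to = 7ul yields an untyped view with b_offset = 2 + 3 = 5 and b_length = 7 - 3 = 4;
   if to exceeds the buffer length it is first clamped to length b. *)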
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_includes' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | larger: LowStar.Monotonic.Buffer.ubuffer_ -> smaller: LowStar.Monotonic.Buffer.ubuffer_
-> Prims.GTot Type0 | {
"end_col": 74,
"end_line": 468,
"start_col": 2,
"start_line": 465
} |
Prims.Tot | val ubuffer (region: HS.rid) (addr: nat) : Tot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr) | val ubuffer (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = | false | null | false | G.erased (ubuffer' region addr) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"FStar.Ghost.erased",
"LowStar.Monotonic.Buffer.ubuffer'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } ) | false | true | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer (region: HS.rid) (addr: nat) : Tot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | region: FStar.Monotonic.HyperHeap.rid -> addr: Prims.nat -> Type0 | {
"end_col": 86,
"end_line": 261,
"start_col": 55,
"start_line": 261
} |
Prims.Tot | val loc : Type0 | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc = MG.loc cls | val loc : Type0
let loc = | false | null | false | MG.loc cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.ModifiesGen.loc",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f) | false | true | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc : Type0 | [] | LowStar.Monotonic.Buffer.loc | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Type0 | {
"end_col": 20,
"end_line": 779,
"start_col": 10,
"start_line": 779
} |
Prims.GTot | val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2 | val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = | false | null | false | ubuffer_disjoint0 b1 b2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.ubuffer_disjoint0"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b1: LowStar.Monotonic.Buffer.ubuffer r a -> b2: LowStar.Monotonic.Buffer.ubuffer r a
-> Prims.GTot Type0 | {
"end_col": 58,
"end_line": 525,
"start_col": 35,
"start_line": 525
} |
Prims.GTot | val modifies_1_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 | val modifies_1_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0
let modifies_1_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | forall (b': ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==>
ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"Prims.l_imp",
"LowStar.Monotonic.Buffer.ubuffer_disjoint",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"LowStar.Monotonic.Buffer.ubuffer_preserved"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
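(* For example, within one allocation a view over offsets [0, 4) and a view over [4, 8)
   are disjoint, whereas views over [0, 5) and [4, 8) are not; any zero-length view is
   trivially disjoint from everything. *)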
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
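(* In words: every reference that exists in h1 still exists in h2 with the same contents,
   every live region stays live, and no address in use in h1 becomes unused in h2. *)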
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1_preserves_ubuffers | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 132,
"end_line": 625,
"start_col": 4,
"start_line": 624
} |
Prims.GTot | val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) | val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc
let loc_buffer_from_to #a #rrel #rel b from to = | false | null | false | if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to_none_cond",
"FStar.ModifiesGen.loc_none",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"Prims.bool",
"FStar.ModifiesGen.loc_of_aloc",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
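(* In words: any typed buffer b' at the same region and address that is live in h stays
   live in h', and whenever the offset range described by b falls inside b', the
   corresponding slice of b' has the same contents in h and h'. *)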
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
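(* The untyped view of the sub-range [from, to) of b: a degenerate range (null buffer,
   to < from, or from past the end of b) yields an empty view, and a to beyond the end
   of b is clamped to length b. *)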
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
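(* Like modifies_1, but only the sub-range [from, to) of b counts as written; for a
   degenerate range (per ubuffer_of_buffer_from_to_none_cond) this collapses to
   modifies_0. *)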
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_buffer_from_to (#a:Type0) (#rrel #rel:srel a) (b: mbuffer a rrel rel) (from to: U32.t) : GTot loc | [] | LowStar.Monotonic.Buffer.loc_buffer_from_to | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> from: FStar.UInt32.t -> to: FStar.UInt32.t
-> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 88,
"end_line": 802,
"start_col": 2,
"start_line": 799
} |
Prims.Tot | val loc_none: loc | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_none = MG.loc_none | val loc_none: loc
let loc_none = | false | null | false | MG.loc_none | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.ModifiesGen.loc_none",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
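(* Disjointness of untyped views: a zero-length view is disjoint from everything; otherwise the two views must sit in the same underlying array and their ranges must not overlap. *)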
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
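(* [modifies_0] is the clause for steps that write nothing: live references keep their values, live regions stay live, and any address unused afterwards was already unused before. *)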
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
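(* [modifies_1 b] confines writes to the untyped footprint of [b]: references at other addresses keep their values, liveness of references is preserved, and every ubuffer disjoint from [b]'s view keeps its contents. A client-side sketch (not part of this file, assuming the [upd] / [loc_buffer] / [modifies] interface of the fsti): after [upd b i v], for a live [b'] disjoint from [b] one can conclude [as_seq h1 b' == as_seq h0 b']. *)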
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
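(* [modifies_addr_of b] is the clause for operations, such as freeing, that may affect the whole address of [b]: it does not promise liveness preservation, and the address of [b] itself may become unused. *)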
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
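(* The relations and lemmas above are exactly the interface expected by FStar.ModifiesGen: packaging them as a [cls] instance yields the generic [loc] type and its [modifies] theory by instantiation. *)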
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
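(* [intro_ambient loc] records [loc] as an ambient fact (see [FStar.Pervasives.intro_ambient]), a proof-level hint for automation with no runtime content. *)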
let _ = intro_ambient loc | false | true | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_none: loc | [] | LowStar.Monotonic.Buffer.loc_none | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | LowStar.Monotonic.Buffer.loc | {
"end_col": 26,
"end_line": 782,
"start_col": 15,
"start_line": 782
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel | let compatible_sub_preorder
(#a: Type0)
(len: nat)
(rel: srel a)
(i: nat)
(j: nat{i <= j /\ j <= len})
(sub_rel: srel a)
= | false | null | false | compatible_subseq_preorder len rel i j sub_rel | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"Prims.nat",
"LowStar.Monotonic.Buffer.srel",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.compatible_subseq_preorder",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compatible_sub_preorder : len: Prims.nat ->
rel: LowStar.Monotonic.Buffer.srel a ->
i: Prims.nat ->
j: Prims.nat{i <= j /\ j <= len} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | [] | LowStar.Monotonic.Buffer.compatible_sub_preorder | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
len: Prims.nat ->
rel: LowStar.Monotonic.Buffer.srel a ->
i: Prims.nat ->
j: Prims.nat{i <= j /\ j <= len} ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> Prims.logical | {
"end_col": 50,
"end_line": 36,
"start_col": 4,
"start_line": 36
} |
|
Prims.Tot | val srel_to_lsrel (#a: Type0) (len: nat) (pre: srel a) : P.preorder (Seq.lseq a len) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre | val srel_to_lsrel (#a: Type0) (len: nat) (pre: srel a) : P.preorder (Seq.lseq a len)
let srel_to_lsrel (#a: Type0) (len: nat) (pre: srel a) : P.preorder (Seq.lseq a len) = | false | null | false | pre | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"Prims.nat",
"LowStar.Monotonic.Buffer.srel",
"FStar.Preorder.preorder",
"FStar.Seq.Properties.lseq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val srel_to_lsrel (#a: Type0) (len: nat) (pre: srel a) : P.preorder (Seq.lseq a len) | [] | LowStar.Monotonic.Buffer.srel_to_lsrel | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | len: Prims.nat -> pre: LowStar.Monotonic.Buffer.srel a
-> FStar.Preorder.preorder (FStar.Seq.Properties.lseq a len) | {
"end_col": 94,
"end_line": 26,
"start_col": 91,
"start_line": 26
} |
Prims.GTot | val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let g_is_null #_ #_ #_ b = Null? b | val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool
let g_is_null #_ #_ #_ b = | false | null | false | Null? b | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.uu___is_Null",
"Prims.bool"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val g_is_null (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot bool | [] | LowStar.Monotonic.Buffer.g_is_null | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot Prims.bool | {
"end_col": 34,
"end_line": 94,
"start_col": 27,
"start_line": 94
} |
FStar.Pervasives.Lemma | val modifies_0_ubuffer (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2: HS.mem)
: Lemma (requires (modifies_0 h1 h2)) (ensures (ubuffer_preserved b h1 h2)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r') | val modifies_0_ubuffer (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2: HS.mem)
: Lemma (requires (modifies_0 h1 h2)) (ensures (ubuffer_preserved b h1 h2))
let modifies_0_ubuffer (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2: HS.mem)
: Lemma (requires (modifies_0 h1 h2)) (ensures (ubuffer_preserved b h1 h2)) = | false | null | true | same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r') | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.same_mreference_ubuffer_preserved",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_0_mreference",
"Prims.unit",
"LowStar.Monotonic.Buffer.modifies_0",
"Prims.squash",
"LowStar.Monotonic.Buffer.ubuffer_preserved",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0_ubuffer (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2: HS.mem)
: Lemma (requires (modifies_0 h1 h2)) (ensures (ubuffer_preserved b h1 h2)) | [] | LowStar.Monotonic.Buffer.modifies_0_ubuffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.ubuffer r a ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.modifies_0 h1 h2)
(ensures LowStar.Monotonic.Buffer.ubuffer_preserved b h1 h2) | {
"end_col": 93,
"end_line": 600,
"start_col": 2,
"start_line": 600
} |
FStar.Pervasives.Lemma | val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union_loc_none_r = MG.loc_union_loc_none_r | val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)]
let loc_union_loc_none_r = | false | null | true | MG.loc_union_loc_none_r | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.loc_union_loc_none_r",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
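(* Illustrative note (not part of the original source): a small worked instance of
   the offset arithmetic above. With len = 10, i1 = 2, j1 = 8, i2 = 1, j2 = 4, the
   inner range [i2, j2) is taken relative to [i1, j1), so the composed absolute
   range is [i1 + i2, i1 + j2) = [3, 6), which is what the ensures clause states. *)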
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
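(* Illustrative note (not part of the original source): for a non-null buffer with
   max_length = 16, idx = 4, length = 8 over a non-memory-managed reference, the
   untyped view computed above is { b_max_length = 16; b_offset = 4; b_length = 8;
   b_is_mm = false }; a null buffer collapses to the all-zero view. *)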
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
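(* Illustrative note (not part of the original source): restricting a buffer of
   length 8 (idx = 4) to the range [2, 6) keeps b_max_length, shifts the offset to
   b_offset = 4 + 2 = 6, and sets b_length = 6 - 2 = 4; if the buffer is null,
   to < from, or from exceeds the length, the view degenerates to the all-zero one. *)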
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
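(* Illustrative note (not part of the original source): over the same 16-cell
   allocation, the views [0, 8) and [8, 16) are disjoint (0 + 8 <= 8), while
   [0, 8) and [4, 12) are not; any zero-length view is disjoint from everything. *)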
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union_loc_none_r
(s: loc)
: Lemma
(loc_union s loc_none == s)
[SMTPat (loc_union s loc_none)] | [] | LowStar.Monotonic.Buffer.loc_union_loc_none_r | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures LowStar.Monotonic.Buffer.loc_union s LowStar.Monotonic.Buffer.loc_none == s)
[SMTPat (LowStar.Monotonic.Buffer.loc_union s LowStar.Monotonic.Buffer.loc_none)] | {
"end_col": 50,
"end_line": 796,
"start_col": 27,
"start_line": 796
} |
Prims.GTot | val ubuffer_preserved' (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h': HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
))))) | val ubuffer_preserved' (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h': HS.mem) : GTot Type0
let ubuffer_preserved' (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h': HS.mem) : GTot Type0 = | false | null | false | forall (t': Type0) (rrel: srel t') (rel: srel t') (b': mbuffer t' rrel rel).
((frameOf b' == r /\ as_addr b' == a) ==>
((live h b' ==> live h' b') /\
(((live h b' /\ live h' b' /\ Buffer? b') ==>
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } = Ghost.reveal b in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\ boff + blen <= U32.v idx + U32.v len) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))))))) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_imp",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.live",
"Prims.b2t",
"LowStar.Monotonic.Buffer.uu___is_Buffer",
"Prims.bool",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.op_Subtraction",
"Prims.logical",
"LowStar.Monotonic.Buffer.ubuffer'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_preserved' (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h': HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_preserved' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.ubuffer r a ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 7,
"end_line": 298,
"start_col": 2,
"start_line": 287
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x) | val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]]
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x = | false | null | true | if g_is_null x
then ()
else
MG.modifies_preserves_region_liveness_aloc l1
l2
h
h'
#(frameOf x)
#(as_addr x)
(ubuffer_of_buffer x) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.bool",
"FStar.ModifiesGen.modifies_preserves_region_liveness_aloc",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
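(*
 * An `ubuffer` records only the footprint of a buffer within its underlying
 * reference: the total length of that reference, the offset and length of the
 * designated slice, and whether the reference is manually managed. The null
 * buffer is mapped to the empty footprint. This address-level view is what the
 * generic modifies theory below is instantiated with.
 *)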
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
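(*
 * Preservation of an untyped buffer between two heaps: every typed buffer
 * living at the same region and address stays live, and for any such buffer
 * whose range covers the recorded window, the slice of its contents
 * corresponding to [b_offset, b_offset + b_length) is unchanged. Quantifying
 * over all typed views keeps the predicate independent of any element type.
 *)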
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
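(*
 * Untyped view of the [from, to) sub-range of a buffer: the degenerate cases
 * (null buffer, empty or out-of-range interval) collapse to the empty
 * footprint; otherwise the offset is shifted by `from` and the length is
 * clipped to the end of the buffer.
 *)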
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
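(*
 * Two untyped buffers are disjoint when either is empty, or when they sit in a
 * reference of the same total length and their [offset, offset + length)
 * windows do not overlap.
 *)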
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
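(*
 * `modifies_0 h1 h2`: every reference live in `h1` is still live in `h2` with
 * unchanged contents, no live region disappears, and, within regions live in
 * both heaps, no address in use in `h1` becomes unused in `h2`. Allocating
 * fresh references is still permitted, since an address may go from unused in
 * `h1` to used in `h2`.
 *)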
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
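(*
 * `modifies_1 b h1 h2`: regions and livenesses are preserved, references at
 * any other address keep their contents, and every untyped buffer at the same
 * address that is disjoint from the footprint of `b` is preserved.
 * Informally, only the range covered by `b` itself may change.
 *)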
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
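(*
 * The untyped-buffer relations above (inclusion, disjointness, preservation)
 * are packaged into an instance of `FStar.ModifiesGen.cls`. Everything that
 * follows (`loc`, `loc_union`, `loc_buffer`, `loc_includes`, `loc_disjoint`,
 * `modifies`, ...) is obtained by instantiating the generic theory at this
 * class, which is why most of the lemmas below are one-line appeals to their
 * `MG` counterparts.
 *)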
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
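(*
 * The abstract footprint of a buffer: `loc_none` for the null buffer, and
 * otherwise the location of its untyped view at its region and address.
 * `loc_buffer_from_to` restricts the footprint to the [from, to) sub-range,
 * degenerating to `loc_none` when the buffer is null, the interval is empty,
 * or `from` lies past the end of the buffer.
 *)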
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_region_buffer
(l1 l2:loc)
(h h':HS.mem)
(#a:Type0) (#rrel #rel:srel a)
(x:mbuffer a rrel rel)
:Lemma (requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_buffer x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (frameOf x)))
(ensures (HS.live_region h' (frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (frameOf x))];
]] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l1: LowStar.Monotonic.Buffer.loc ->
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2) h h' /\
LowStar.Monotonic.Buffer.loc_disjoint l1 (LowStar.Monotonic.Buffer.loc_buffer x) /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2)
h
h');
SMTPat (FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2)
h
h');
SMTPat (FStar.Monotonic.HyperStack.live_region h' (LowStar.Monotonic.Buffer.frameOf x)
)
]
]
] | {
"end_col": 108,
"end_line": 991,
"start_col": 2,
"start_line": 990
} |
FStar.Pervasives.Lemma | val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls | val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))]
let loc_disjoint_regions = | false | null | true | MG.loc_disjoint_regions #_ #cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.loc_disjoint_regions",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
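(*
 * Together, these two lemmas make taking sub-buffers well-behaved later in the
 * file: a buffer's preorder is compatible with itself over its whole range
 * (reflexivity), and a compatible sub-range of a compatible sub-range is again
 * compatible with the original preorder once the offsets are shifted
 * (transitivity).
 *)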
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
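(*
 * Representation of buffers: either the null buffer, or a view into an
 * underlying monotonic reference holding a sequence of `max_length` elements,
 * starting at `idx` and spanning the (ghost) `length` elements. `rrel` is the
 * preorder governing the whole underlying reference, while `rel` is the
 * preorder exposed for the sub-sequence designated by this buffer.
 *)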
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
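(* Reading aid, a sketch that is not part of the original file: modifies_addr_of b h1 h2 is
   the raw footprint of an operation that may affect the very address of b (deallocation is
   the motivating case).  References at other addresses keep their values and live regions
   stay live, but nothing is promised about b's own address.  Later in this module it is
   folded into the generic clause, roughly
     modifies_addr_of b h1 h2 ==> modifies (loc_addr_of_buffer b) h1 h2
   which is the statement of modifies_addr_of_modifies. *)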
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_regions
(preserve_liveness1: bool)
(preserve_liveness2: bool)
(rs1 rs2: Set.set HS.rid)
: Lemma
(requires (Set.subset (Set.intersect rs1 rs2) Set.empty))
(ensures (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2)))
[SMTPat (loc_disjoint (loc_regions preserve_liveness1 rs1) (loc_regions preserve_liveness2 rs2))] | [] | LowStar.Monotonic.Buffer.loc_disjoint_regions | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
preserve_liveness1: Prims.bool ->
preserve_liveness2: Prims.bool ->
rs1: FStar.Set.set FStar.Monotonic.HyperHeap.rid ->
rs2: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires FStar.Set.subset (FStar.Set.intersect rs1 rs2) FStar.Set.empty)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_regions preserve_liveness1
rs1)
(LowStar.Monotonic.Buffer.loc_regions preserve_liveness2 rs2))
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_regions preserve_liveness1
rs1)
(LowStar.Monotonic.Buffer.loc_regions preserve_liveness2 rs2))
] | {
"end_col": 58,
"end_line": 926,
"start_col": 27,
"start_line": 926
} |
FStar.Pervasives.Lemma | val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to) | val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to))
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to = | false | null | true | if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else
MG.loc_includes_aloc #_
#cls
#(frameOf b)
#(as_addr b)
(ubuffer_of_buffer b)
(ubuffer_of_buffer_from_to b from to) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to_none_cond",
"Prims.bool",
"FStar.ModifiesGen.loc_includes_aloc",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
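(* Reading aid, a sketch that is not part of the original file: compatibility says that
   slicing out [i, j) and writing a sub-sequence back with replace_subseq commute with the
   two preorders, which is what lets an update through a sub-buffer be justified against
   the preorder of the enclosing buffer.  A degenerate instance that always holds is the
   trivial relation
     let trivial_rel (a:Type0) : srel a = fun _ _ -> True
   which is compatible with itself at every i and j; the two lemmas that follow establish
   reflexivity (on the full range) and transitivity of compatibility. *)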
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
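(* Reading aid, a sketch that is not part of the original file: two ubuffers at the same
   region and address are disjoint when either is empty or when their offset ranges do not
   overlap within the same underlying array.  For instance, descriptors over an array of
   b_max_length = 8 with ranges [0, 2) and [4, 6) are disjoint, while [0, 4) and [2, 6) are
   not; this is the relation the generic modifies clause below uses to decide which buffers
   a write may touch. *)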
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
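(* Reading aid, a sketch that is not part of the original file: the whole location algebra
   (loc, loc_none, loc_union, loc_includes, loc_disjoint, and, later in the module,
   modifies) is not re-proved here; it is obtained by instantiating FStar.ModifiesGen with
   the cls record above, whose fields are exactly the ubuffer_includes, ubuffer_disjoint
   and ubuffer_preserved lemmas established earlier.  A caller typically frames two buffers
   at once with
     loc_union (loc_buffer b1) (loc_buffer b2)
   and the union laws just below (idempotence, commutativity, associativity) come for free
   from MG. *)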
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
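(* Reading aid, a sketch that is not part of the original file: loc_buffer b is loc_none for
   the null buffer and otherwise the singleton abstract location carrying b's ubuffer
   descriptor.  A typical obligation such as
     loc_disjoint (loc_buffer b1) (loc_buffer b2)
   therefore reduces either to the two buffers living at different region/address pairs or
   to ubuffer_disjoint on their descriptors, via MG.loc_disjoint_aloc_intro. *)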
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes_loc_buffer_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from to: U32.t)
: Lemma
(loc_includes (loc_buffer b) (loc_buffer_from_to b from to)) | [] | LowStar.Monotonic.Buffer.loc_includes_loc_buffer_loc_buffer_from_to | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> from: FStar.UInt32.t -> to: FStar.UInt32.t
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_buffer b)
(LowStar.Monotonic.Buffer.loc_buffer_from_to b from to)) | {
"end_col": 121,
"end_line": 862,
"start_col": 2,
"start_line": 860
} |
FStar.Pervasives.Lemma | val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
) | val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 = | false | null | true | MG.modifies_address_intro #_
#cls
(frameOf b)
(as_addr b)
h1
h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p -> modifies_addr_of_mreference b h1 h2 p)
(fun r n -> modifies_addr_of_unused_in b h1 h2 r n) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.ModifiesGen.modifies_address_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.modifies_addr_of_live_region",
"Prims.unit",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_addr_of_mreference",
"Prims.nat",
"LowStar.Monotonic.Buffer.modifies_addr_of_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
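(* Reading aid, a sketch that is not part of the original file: a non-null buffer packages
   an underlying mreference to a sequence of max_length elements together with a start
   index idx and a ghost length, and denotes the slice [v idx, v idx + v length) of that
   sequence.  For example, Buffer 8ul content 2ul (Ghost.hide 3ul) views exactly elements
   2, 3 and 4 of the 8-element sequence held by content; as_seq below is the corresponding
   Seq.slice, and mgsub only adjusts idx and the ghost length without touching content. *)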
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
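(* Note (added for exposition): [mgsub] does not copy anything; it returns a
   view into the same [content], starting [i] cells further in and [len]
   cells long. Nested sub-buffers therefore compose by adding their offsets,
   which is what [gsub_gsub] below establishes via [U32.add i1 i2]. *)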
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
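(* Note (added for exposition): preservation of an untyped buffer is stated
   against every typed buffer at the same region and address: liveness in [h]
   carries over to [h'], and any live [Buffer] whose range covers
   [boff, boff + blen) keeps the contents of that slice unchanged between
   [h] and [h']. *)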
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
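(* Illustration (hypothetical values, added for exposition): restricting an
   untyped view to [from, to) shifts its offset by [from] and sets its length
   to [to - from], clamping [to] at [length b]. For a buffer of length 8 whose
   underlying index is 2, the range [from, to) = [2, 5) yields [b_offset = 4]
   and [b_length = 3]; degenerate inputs fall into the empty view returned by
   the first branch. *)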
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
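(* Illustration (hypothetical values, added for exposition): inclusion
   requires the two views to come from the same allocation (same
   [b_max_length] and [b_is_mm]) with the smaller range nested in the larger
   one, e.g. a view covering [3, 6) is included in one covering [2, 8) of the
   same allocation. *)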
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* may be take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
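(* Illustration (hypothetical values, added for exposition): two views over
   the same allocation are disjoint when their ranges do not overlap, e.g.
   offsets [0, 3) and [5, 8) within a 10-cell allocation; any view of length
   0 is disjoint from everything. *)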
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
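(* Note (added for exposition): [modifies_1' b h1 h2] lets the cells of [b]
   change while everything else is framed out: regions stay live, every
   mreference at a different address keeps its contents, all liveness is
   preserved, no address becomes unused, and every untyped buffer at the same
   address that is disjoint from [b] keeps its contents. *)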
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
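(* Note (added for exposition): this packages [ubuffer_includes],
   [ubuffer_disjoint] and [ubuffer_preserved], together with the reflexivity,
   transitivity, symmetry and framing lemmas proved above, as an
   [FStar.ModifiesGen] class. The definitions of [loc], [loc_union],
   [loc_includes], [loc_disjoint] and [modifies] below are then obtained by
   instantiating the generic theory with [cls]. *)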
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
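(* Note (added for exposition): the location of a null buffer is [loc_none];
   otherwise it is the generic location of the buffer's untyped view, so
   inclusion and disjointness of buffer locations reduce to the range-based
   [ubuffer] relations defined earlier. *)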
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2)) | [] | LowStar.Monotonic.Buffer.modifies_addr_of_modifies | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.modifies_addr_of b h1 h2)
(ensures
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_addr_of_buffer b) h1 h2) | {
"end_col": 5,
"end_line": 1110,
"start_col": 2,
"start_line": 1103
} |
FStar.Pervasives.Lemma | val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1 | val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)]
let fresh_frame_modifies h0 h1 = | false | null | true | MG.fresh_frame_modifies #_ cls h0 h1 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.ModifiesGen.fresh_frame_modifies",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
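(* Note (added for exposition): the whole-range case is immediate because
   replacing the subsequence [0, Seq.length s1) of [s1] by [s2] is just [s2],
   which is the equality asserted above. *)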
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
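(* Illustrative reading, not part of the original file: modifies_1' b h1 h2 (the body of
   modifies_1 below) roughly says that only the allocation unit of b may have changed.
   Live regions stay live, references at any other frame or address keep their value,
   every reference that was live stays live, addresses that are unused afterwards were
   already unused before, and any untyped view at b's own address that is disjoint from
   b's view is preserved. *)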
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
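(* Illustrative note, not part of the original file: cls packages the ubuffer algebra
   (inclusion, disjointness, preservation, together with the laws proved above) as an
   FStar.ModifiesGen class instance; the definitions below then obtain loc, loc_union,
   loc_includes, loc_disjoint and modifies for buffers generically from this instance. *)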
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
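(* Illustrative note, not part of the original file: the proof above tightens a modifies
   footprint of l together with the whole buffer b down to l together with the window
   from..to. It reasons cell by cell: a cell of a disjoint untyped view either lies
   outside b entirely, so the generic disjointness argument applies, or it lies in the
   prefix before from or in the suffix after to, both of which the precondition states
   are unchanged between h and h'. *)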
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l' | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val fresh_frame_modifies (h0 h1: HS.mem) : Lemma
(requires (HS.fresh_frame h0 h1))
(ensures (modifies loc_none h0 h1))
[SMTPat (HS.fresh_frame h0 h1)] | [] | LowStar.Monotonic.Buffer.fresh_frame_modifies | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h0: FStar.Monotonic.HyperStack.mem -> h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires FStar.Monotonic.HyperStack.fresh_frame h0 h1)
(ensures LowStar.Monotonic.Buffer.modifies LowStar.Monotonic.Buffer.loc_none h0 h1)
[SMTPat (FStar.Monotonic.HyperStack.fresh_frame h0 h1)] | {
"end_col": 69,
"end_line": 1250,
"start_col": 33,
"start_line": 1250
} |
FStar.Pervasives.Lemma | val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2))) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2) | val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2)))
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 = | false | null | true | if
ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else
MG.loc_disjoint_aloc_intro #_
#cls
#(frameOf b)
#(as_addr b)
#(frameOf b)
#(as_addr b)
(ubuffer_of_buffer_from_to b from1 to1)
(ubuffer_of_buffer_from_to b from2 to2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"Prims.op_BarBar",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to_none_cond",
"Prims.bool",
"FStar.ModifiesGen.loc_disjoint_aloc_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly; otherwise the proof of transitivity gets quite flaky
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
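(* Illustrative note, not part of the original file: compatibility of a sub-view preorder
   sub_rel with the parent preorder rel on the window from i to j roughly means two things.
   Assuming, say, len = 8, i = 2 and j = 6: any rel-step between two full 8-element
   sequences must induce a sub_rel-step between their slices at indices 2..5, and
   conversely a sub_rel-step on such a slice, spliced back with Seq.replace_subseq, must
   be a rel-step on the full sequence. *)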
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
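(* Illustrative note, not part of the original file: the offset arithmetic in the lemma
   above composes two nested windows. For example, with len = 10, an outer window from
   i1 = 2 to j1 = 8 and an inner window from i2 = 1 to j2 = 4 taken relative to the outer
   one yield the absolute window from i1 + i2 = 3 to i1 + j2 = 6. *)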
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
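(* Illustrative note, not part of the original file: the untyped view only records
   positions within the allocation. For a hypothetical buffer built as
   Buffer 64ul content 8ul (Ghost.hide 16ul), the view is b_max_length = 64,
   b_offset = 8, b_length = 16, and b_is_mm reflects whether content is manually
   managed; the element type and the preorders are deliberately forgotten. *)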
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
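(* Illustrative note, not part of the original file: the from/to view is carved out of
   the buffer's own untyped view. Assuming a buffer whose view has b_offset = 8 and
   b_length = 16, taking from = 4ul and to = 12ul yields b_offset = 12 and b_length = 8,
   while a degenerate request such as to smaller than from falls into the none-condition
   above and collapses to the empty view. *)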
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_loc_buffer_from_to
(#a: _) (#rrel #rel: _)
(b: mbuffer a rrel rel)
(from1 to1 from2 to2: U32.t)
: Lemma
(requires (U32.v to1 <= U32.v from2 \/ U32.v to2 <= U32.v from1))
(ensures (loc_disjoint (loc_buffer_from_to b from1 to1) (loc_buffer_from_to b from2 to2))) | [] | LowStar.Monotonic.Buffer.loc_disjoint_loc_buffer_from_to | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
from1: FStar.UInt32.t ->
to1: FStar.UInt32.t ->
from2: FStar.UInt32.t ->
to2: FStar.UInt32.t
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v to1 <= FStar.UInt32.v from2 \/ FStar.UInt32.v to2 <= FStar.UInt32.v from1)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_buffer_from_to b
from1
to1)
(LowStar.Monotonic.Buffer.loc_buffer_from_to b from2 to2)) | {
"end_col": 173,
"end_line": 922,
"start_col": 2,
"start_line": 920
} |
FStar.Pervasives.Lemma | val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference | val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]]
let modifies_liveness_insensitive_region_mreference = | false | null | true | MG.modifies_preserves_region_liveness_reference | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.modifies_preserves_region_liveness_reference",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
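(*
 * Illustrative reading (editorial note, not part of the original file): with the
 * hypothetical instance len = 4, i = 1, j = 3, compatible_sub_preorder 4 rel 1 3 sub_rel
 * says (1) any two rel-related length-4 sequences have sub_rel-related slices [1, 3),
 * and (2) if the slice [1, 3) of a length-4 sequence is sub_rel-related to some
 * replacement slice, then replacing it yields a sequence rel-related to the original.
 *)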
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
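(*
 * Worked instance (editorial note, not part of the original file): the offsets in the
 * transitivity lemma compose by shifting.  With the hypothetical values len = 10,
 * outer window [i1, j1) = [2, 8) and relative window [i2, j2) = [1, 4) inside it
 * (note j2 = 4 <= j1 - i1 = 6), the conclusion is compatibility of the absolute
 * window [i1 + i2, i1 + j2) = [3, 6) with the outermost preorder rel.
 *)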
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
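(* Hedged client-side sketch (editorial addition; the name example_whole_buffer_sub is
   hypothetical and not part of the original interface): the doc comment above can be
   restated as a trivial corollary of gsub_zero_length. *)
let example_whole_buffer_sub (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (compatible_sub b 0ul (len b) rel)
  = gsub_zero_length b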
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
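(*
 * Concrete reading (editorial note, not part of the original file): a live
 * Buffer 16ul content 4ul (Ghost.hide 8ul) over a non-manually-managed reference is
 * abstracted by the untyped view
 *   { b_max_length = 16; b_offset = 4; b_length = 8; b_is_mm = false },
 * while the null buffer collapses to the all-zero view above.  The numbers 16/4/8 are
 * hypothetical and only illustrate the projection performed by ubuffer_of_buffer'.
 *)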
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
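(*
 * Clipping behaviour spelled out (editorial note, not part of the original file):
 * for a buffer of length 10 and the hypothetical bounds from = 2ul, to = 15ul, the
 * none-condition is false (2 <= 10), to is clipped to the buffer length, and the
 * resulting view covers offsets [b_offset + 2, b_offset + 10) of the underlying
 * allocation, i.e. b_length becomes 10 - 2 = 8.
 *)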
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 *       maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
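(*
 * Worked instance (editorial note, not part of the original file): two non-empty views
 * over the same allocation, say offsets [2, 5) and [5, 9) of a common b_max_length,
 * satisfy ubuffer_disjoint' because 2 + 3 <= 5; overlapping views such as [2, 6) and
 * [5, 9) do not.  The offsets are hypothetical.
 *)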
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_liveness_insensitive_region_mreference
(l1 l2 : loc)
(h h' : HS.mem)
(#t: Type)
(#pre: Preorder.preorder t)
(x: HS.mreference t pre)
: Lemma
(requires (modifies (loc_union l1 l2) h h' /\ loc_disjoint l1 (loc_mreference x) /\ region_liveness_insensitive_locs `loc_includes` l2 /\ HS.live_region h (HS.frameOf x)))
(ensures (HS.live_region h' (HS.frameOf x)))
[SMTPatOr [
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h (HS.frameOf x))];
[SMTPat (modifies (loc_union l1 l2) h h'); SMTPat (HS.live_region h' (HS.frameOf x))];
]] | [] | LowStar.Monotonic.Buffer.modifies_liveness_insensitive_region_mreference | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
l1: LowStar.Monotonic.Buffer.loc ->
l2: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
x: FStar.Monotonic.HyperStack.mreference t pre
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2) h h' /\
LowStar.Monotonic.Buffer.loc_disjoint l1 (LowStar.Monotonic.Buffer.loc_mreference x) /\
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
l2 /\ FStar.Monotonic.HyperStack.live_region h (FStar.Monotonic.HyperStack.frameOf x))
(ensures FStar.Monotonic.HyperStack.live_region h' (FStar.Monotonic.HyperStack.frameOf x))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2)
h
h');
SMTPat (FStar.Monotonic.HyperStack.live_region h
(FStar.Monotonic.HyperStack.frameOf x))
];
[
SMTPat (LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union l1 l2)
h
h');
SMTPat (FStar.Monotonic.HyperStack.live_region h'
(FStar.Monotonic.HyperStack.frameOf x))
]
]
] | {
"end_col": 101,
"end_line": 987,
"start_col": 54,
"start_line": 987
} |
FStar.Pervasives.Lemma | val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_regions_unused_in = MG.loc_regions_unused_in cls | val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs))
let loc_regions_unused_in = | false | null | true | MG.loc_regions_unused_in cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.loc_regions_unused_in",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
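(* For instance (illustrative numbers only, not part of the lemma statement): with
 * len = 10, an outer window [i1, j1) = [2, 8) and an inner window [i2, j2) = [1, 4)
 * taken relative to it, the composed absolute window is [i1 + i2, i1 + j2) = [3, 6),
 * which satisfies the side conditions 2 <= 8 <= 10 and 1 <= 4 <= 8 - 2. *)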
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
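(* For illustration: instantiating mgsub above with i = 0ul and len = len b leaves the
   content reference, index and length of a non-null buffer unchanged, which is why only
   reflexivity of the compatibility relation is needed here. *)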
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
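(* For illustration (hypothetical values): a Buffer with max_length = 16ul, idx = 2ul
   and length = 5ul is abstracted to
   { b_max_length = 16; b_offset = 2; b_length = 5; b_is_mm = HS.is_mm content };
   only positions and mutability are kept, the element type and contents are erased. *)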
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
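(* For illustration (hypothetical values): on a non-null buffer of length 8 with
   from = 2ul and to = 5ul, the none-condition above is false, to' = 5, and the result
   is the whole-buffer view with b_offset shifted by 2 and b_length set to 3. *)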
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
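(* For illustration (hypothetical values): with equal b_is_mm and b_max_length, a larger
   view at offset 1 of length 8 includes a smaller view at offset 3 of length 3, since
   1 <= 3 and 3 + 3 <= 1 + 8. *)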
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
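(* For illustration (hypothetical values): two non-empty views over the same
   b_max_length, one at offset 2 of length 3 and one at offset 5 of length 4, are
   disjoint because 2 + 3 <= 5; any view of length 0 is disjoint from everything. *)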
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _ | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_regions_unused_in (h: HS.mem) (rs: Set.set HS.rid) : Lemma
(requires (forall r . Set.mem r rs ==> (~ (HS.live_region h r))))
(ensures (loc_unused_in h `loc_includes` loc_regions false rs)) | [] | LowStar.Monotonic.Buffer.loc_regions_unused_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> rs: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(requires
forall (r: FStar.Monotonic.HyperHeap.rid).
FStar.Set.mem r rs ==> ~(FStar.Monotonic.HyperStack.live_region h r))
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_unused_in h)
(LowStar.Monotonic.Buffer.loc_regions false rs)) | {
"end_col": 56,
"end_line": 1215,
"start_col": 28,
"start_line": 1215
} |
FStar.Pervasives.Lemma | val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped | val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
))
let modifies_fresh_frame_popped = | false | null | true | MG.modifies_fresh_frame_popped | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.modifies_fresh_frame_popped",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
* The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
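(* ubuffer_preserved b h h' states that every typed buffer at the same region
   and address keeps its liveness from h to h', and that, whenever its
   footprint covers the range recorded in b, the contents of that range are
   unchanged between h and h'. *)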
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
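(* Two untyped views are disjoint when either one is empty, or when they sit
   in the same underlying array (same b_max_length) and their ranges do not
   overlap. *)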
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
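(* modifies_0 permits only fresh allocation: every reference live in h1 is
   still live in h2 with the same contents, live regions remain live, and no
   address in use in h1 becomes unused in h2. *)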
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
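(* modifies_1 b confines writes to b: references at other addresses keep
   their contents, all liveness is preserved, nothing is freed, and every
   untyped view at b's address that is disjoint from b is preserved. *)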
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
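(* modifies_addr_of differs from modifies_1 in that it places no constraint
   on b's own address: only references at other addresses are preserved, and
   only other addresses are guaranteed not to become unused. *)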
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
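(* cls packages the untyped-buffer theory (inclusion, disjointness and
   preservation, together with their reflexivity, transitivity, symmetry and
   framing lemmas) as an instance of the generic FStar.ModifiesGen class;
   the location and modifies combinators below are obtained by instantiating
   the corresponding MG definitions with it. *)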
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_fresh_frame_popped
(h0 h1: HS.mem)
(s: loc)
(h2 h3: HS.mem)
: Lemma
(requires (
HS.fresh_frame h0 h1 /\
modifies (loc_union (loc_all_regions_from false (HS.get_tip h1)) s) h1 h2 /\
(HS.get_tip h2) == (HS.get_tip h1) /\
HS.popped h2 h3
))
(ensures (
modifies s h0 h3 /\
(HS.get_tip h3) == HS.get_tip h0
)) | [] | LowStar.Monotonic.Buffer.modifies_fresh_frame_popped | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem ->
s: LowStar.Monotonic.Buffer.loc ->
h2: FStar.Monotonic.HyperStack.mem ->
h3: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
FStar.Monotonic.HyperStack.fresh_frame h0 h1 /\
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_all_regions_from
false
(FStar.Monotonic.HyperStack.get_tip h1))
s)
h1
h2 /\ FStar.Monotonic.HyperStack.get_tip h2 == FStar.Monotonic.HyperStack.get_tip h1 /\
FStar.Monotonic.HyperStack.popped h2 h3)
(ensures
LowStar.Monotonic.Buffer.modifies s h0 h3 /\
FStar.Monotonic.HyperStack.get_tip h3 == FStar.Monotonic.HyperStack.get_tip h0) | {
"end_col": 64,
"end_line": 1001,
"start_col": 34,
"start_line": 1001
} |
FStar.Pervasives.Lemma | val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls | val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2))
let modifies_loc_addresses_intro = | false | null | true | MG.modifies_loc_addresses_intro #_ #cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.modifies_loc_addresses_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
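(* A buffer is either null or a view into a heap reference holding a sequence
   of max_length elements: idx and the ghost length delimit the sub-range
   exposed to clients, rrel is the preorder of the whole underlying sequence
   and rel is the preorder intended for the view. *)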
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_loc_addresses_intro
(r: HS.rid)
(a: Set.set nat)
(l: loc)
(h1 h2: HS.mem)
: Lemma
(requires (
HS.live_region h2 r /\
modifies (loc_union (loc_region_only false r) l) h1 h2 /\
HS.modifies_ref r a h1 h2
))
(ensures (modifies (loc_union (loc_addresses true r a) l) h1 h2)) | [] | LowStar.Monotonic.Buffer.modifies_loc_addresses_intro | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
r: FStar.Monotonic.HyperHeap.rid ->
a: FStar.Set.set Prims.nat ->
l: LowStar.Monotonic.Buffer.loc ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
FStar.Monotonic.HyperStack.live_region h2 r /\
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_region_only
false
r)
l)
h1
h2 /\ FStar.Monotonic.HyperStack.modifies_ref r a h1 h2)
(ensures
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_addresses
true
r
a)
l)
h1
h2) | {
"end_col": 74,
"end_line": 1005,
"start_col": 35,
"start_line": 1005
} |
FStar.Pervasives.Lemma | val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_salloc_post = MG.modifies_salloc_post #_ #cls | val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')]
let modifies_salloc_post = | false | null | true | MG.modifies_salloc_post #_ #cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.modifies_salloc_post",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_salloc_post
(#a: Type)
(#rel: Preorder.preorder a)
(init: a)
(h: HS.mem)
(x: HST.mreference a rel { HS.is_stack_region (HS.frameOf x) } )
(h' : HS.mem)
: Lemma
(requires (HST.salloc_post init h x h'))
(ensures (modifies loc_none h h'))
[SMTPat (HST.salloc_post init h x h')] | [] | LowStar.Monotonic.Buffer.modifies_salloc_post | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
init: a ->
h: FStar.Monotonic.HyperStack.mem ->
x:
FStar.HyperStack.ST.mreference a rel
{FStar.Monotonic.HyperStack.is_stack_region (FStar.Monotonic.HyperStack.frameOf x)} ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires FStar.HyperStack.ST.salloc_post init h x h')
(ensures LowStar.Monotonic.Buffer.modifies LowStar.Monotonic.Buffer.loc_none h h')
[SMTPat (FStar.HyperStack.ST.salloc_post init h x h')] | {
"end_col": 58,
"end_line": 1009,
"start_col": 27,
"start_line": 1009
} |
FStar.Pervasives.Lemma | val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
() | val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)]
let live_loc_not_unused_in #_ #_ #_ b h = | false | null | true | unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
() | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.unit",
"FStar.ModifiesGen.loc_addresses_not_unused_in",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.Classical.move_requires",
"FStar.Pervasives.Native.tuple2",
"FStar.Monotonic.HyperHeap.rid",
"FStar.ModifiesGen.does_not_contain_addr",
"Prims.l_imp",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.Pervasives.Native.fst",
"FStar.Monotonic.Heap.addr_unused_in",
"FStar.Pervasives.Native.snd",
"FStar.Map.sel",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"FStar.ModifiesGen.does_not_contain_addr_addr_unused_in",
"FStar.Pervasives.Native.Mktuple2",
"LowStar.Monotonic.Buffer.unused_in_equiv"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
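(*
 * Editorial sketch (not part of the original source, inferred from the proofs below):
 * compatibility between `rel` on full sequences of length `len` and `sub_rel` on the
 * slice [i, j) is used in two directions by the transitivity lemma:
 *   (1) rel s1 s2 ==> sub_rel (Seq.slice s1 i j) (Seq.slice s2 i j), and
 *   (2) sub_rel (Seq.slice s i j) s' ==> rel s (Seq.replace_subseq s i j s').
 *)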
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
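(*
 * Illustration (editorial sketch): for a non-null buffer such as
 * `Buffer 10ul content 2ul (Ghost.hide 4ul)` the untyped view is
 * `{ b_max_length = 10; b_offset = 2; b_length = 4; b_is_mm = HS.is_mm content }`,
 * i.e. it records only where the buffer sits inside its underlying allocation
 * and whether that allocation is manually managed.
 *)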
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
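(* Example (editorial): with equal b_max_length and b_is_mm, a view at offset 0 of
   length 8 includes a view at offset 2 of length 4, since 0 <= 2 and 2 + 4 <= 0 + 8. *)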
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
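(* Example (editorial): two views of the same allocation with (offset, length) equal to
   (0, 3) and (5, 2) are disjoint because 0 + 3 <= 5, whereas (0, 3) and (2, 2) are not,
   since the ranges [0, 3) and [2, 4) overlap. *)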
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
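(* Editorial summary (informal): modifies_0 says that nothing observable changed:
   every reference live in h1 stays live in h2 with the same value, live regions
   stay live, and no address in use in h1 becomes unused in h2, while fresh
   allocations remain permitted. *)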
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
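(* Editorial summary (informal): modifies_1 b confines any change to the address of b:
   references at other addresses keep their values, liveness of every reference is
   preserved, live regions stay live, no used address becomes unused, and every untyped
   view disjoint from b's view at that address is unchanged. *)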
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
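(* Editorial note (informal): `cls` packages the ubuffer theory above (inclusion,
   disjointness, preservation, and their laws) as an abstract-location class for
   FStar.ModifiesGen; the generic loc / modifies vocabulary below is then obtained
   by instantiating MG at this class. *)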
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_loc_not_unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (requires (live h b))
(ensures (loc_not_unused_in h `loc_includes` loc_addr_of_buffer b))
[SMTPat (live h b)] | [] | LowStar.Monotonic.Buffer.live_loc_not_unused_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.live h b)
(ensures
LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_not_unused_in h)
(LowStar.Monotonic.Buffer.loc_addr_of_buffer b))
[SMTPat (LowStar.Monotonic.Buffer.live h b)] | {
"end_col": 4,
"end_line": 1226,
"start_col": 2,
"start_line": 1223
} |
FStar.Pervasives.Lemma | val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls | val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))]
let region_liveness_insensitive_regions = | false | null | true | MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.loc_includes_region_liveness_insensitive_locs_loc_regions",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
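(* A hedged illustration of the property documented above (a sketch, not part of the
   original development): since [gsub_zero_length] gives [b == mgsub rel b 0ul (len b)],
   one expects, for any heap [h] in which [b] is live,
   [as_seq h (mgsub rel b 0ul (len b)) == as_seq h b]. *)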
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
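(* Worked example with hypothetical numbers (illustration only): for a non-null buffer of
   length 10 whose [idx] is 3, a call with [from = 2ul] and [to = 20ul] clips [to'] to 10,
   so the resulting untyped view has [b_offset = 3 + 2 = 5] and [b_length = 10 - 2 = 8];
   with [from = 2ul] and [to = 1ul] the none-condition fires and the empty view is returned. *)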
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
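(* Worked example (illustration only): within the same 16-element allocation, a view with
   [b_offset = 0; b_length = 16] includes one with [b_offset = 4; b_length = 4], since
   0 <= 4 and 4 + 4 <= 0 + 16, provided both views agree on [b_is_mm] and [b_max_length]. *)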
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
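(* Worked example (illustration only): two views into the same allocation, one with
   [b_offset = 0; b_length = 8] and one with [b_offset = 8; b_length = 8], are disjoint
   because 0 + 8 <= 8; a view with [b_length = 0] is trivially disjoint from any other. *)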
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val region_liveness_insensitive_regions (rs: Set.set HS.rid) : Lemma
(region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))
[SMTPat (region_liveness_insensitive_locs `loc_includes` (loc_regions true rs))] | [] | LowStar.Monotonic.Buffer.region_liveness_insensitive_regions | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | rs: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
(LowStar.Monotonic.Buffer.loc_regions true rs))
[
SMTPat (LowStar.Monotonic.Buffer.loc_includes LowStar.Monotonic.Buffer.region_liveness_insensitive_locs
(LowStar.Monotonic.Buffer.loc_regions true rs))
] | {
"end_col": 66,
"end_line": 972,
"start_col": 2,
"start_line": 972
} |
FStar.Pervasives.Lemma | val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2))) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else () | val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2)))
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 = | false | null | true | if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then
MG.loc_disjoint_aloc_elim #_
#cls
#(frameOf b1)
#(as_addr b1)
#(frameOf b2)
#(as_addr b2)
(ubuffer_of_buffer b1)
(ubuffer_of_buffer b2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.ModifiesGen.loc_disjoint_aloc_elim",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"Prims.bool",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
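(* Worked offset arithmetic for the lemma above (hypothetical numbers, illustration only):
   with [len = 12], an outer window [i1, j1) = [2, 10) and an inner window [i2, j2) = [1, 4)
   taken relative to the outer one, the side conditions hold (j2 = 4 <= j1 - i1 = 8) and the
   composed absolute window is [i1 + i2, i1 + j2) = [3, 6). *)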
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
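(* The client-facing [modifies] clause and its laws are inherited directly from
   [FStar.ModifiesGen]; the buffer-specific eliminators and introduction lemmas
   below ([modifies_buffer_elim], [modifies_1_modifies], ...) are where extra
   proof work is still needed. For illustration only, a typical client-level
   specification built from these combinators (hypothetical signature, stated
   over the derived [LowStar.Buffer.buffer] type, not part of this file) would
   look like:
     val copy (dst src: buffer t) : Stack unit
       (requires fun h -> live h dst /\ live h src /\ disjoint dst src)
       (ensures  fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1)
*)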
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val disjoint_neq (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires (disjoint b1 b2 /\ U32.v (len b1) > 0))
(ensures (~(b1 === b2))) | [] | LowStar.Monotonic.Buffer.disjoint_neq | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.disjoint b1 b2 /\
FStar.UInt32.v (LowStar.Monotonic.Buffer.len b1) > 0) (ensures ~(b1 === b2)) | {
"end_col": 9,
"end_line": 1260,
"start_col": 2,
"start_line": 1258
} |
FStar.Pervasives.Lemma | val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
) | val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 = | false | null | true | if g_is_null b
then
(modifies_1_null b h1 h2;
modifies_0_modifies h1 h2)
else
MG.modifies_intro (loc_buffer b)
h1
h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_
#cls
#(frameOf b)
#(as_addr b)
(ubuffer_of_buffer b)
true
(HS.frameOf p)
(Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p)
(fun t pre p -> modifies_1_liveness b h1 h2 p)
(fun r n -> modifies_1_unused_in b h1 h2 r n)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r'
#a'
b'
h1
h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.modifies_0_modifies",
"Prims.unit",
"LowStar.Monotonic.Buffer.modifies_1_null",
"Prims.bool",
"FStar.ModifiesGen.modifies_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.loc_buffer",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.modifies_1_live_region",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"LowStar.Monotonic.Buffer.modifies_1_mreference",
"FStar.ModifiesGen.loc_disjoint_aloc_addresses_elim",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Set.singleton",
"Prims.nat",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.loc_disjoint_sym",
"LowStar.Monotonic.Buffer.loc_mreference",
"LowStar.Monotonic.Buffer.modifies_1_liveness",
"LowStar.Monotonic.Buffer.modifies_1_unused_in",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"LowStar.Monotonic.Buffer.modifies_1_ubuffer",
"LowStar.Monotonic.Buffer.same_mreference_ubuffer_preserved",
"FStar.ModifiesGen.loc_disjoint_aloc_elim",
"FStar.ModifiesGen.loc_of_aloc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* may be take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
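(* The [cls] instance above packages the [ubuffer] theory for FStar.ModifiesGen:
   [ubuffer_includes] and [ubuffer_disjoint] provide inclusion and disjointness of
   abstract locations, [ubuffer_preserved] is the per-location preservation
   predicate, and the accompanying lemmas discharge the reflexivity, transitivity,
   symmetry and framing obligations of [MG.Cls].  Everything below ([loc],
   [loc_buffer], [modifies], ...) is obtained by instantiating the generic
   modifies theory with this class. *)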
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
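(* The footprint of a buffer is thus the generic location of its untyped view,
   with the null buffer mapped to [loc_none].  As an illustrative sketch only
   (not a definition of this module), a client would combine footprints with
   [loc_union]:
     let fp b1 b2 = loc_union (loc_buffer b1) (loc_buffer b2)
*)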
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
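(* [loc_disjoint_addresses] and [loc_disjoint_regions] are inherited from
   FStar.ModifiesGen: disjointness of address- and region-based locations is
   decided purely from the region identifiers and address sets involved,
   independently of any buffer contents. *)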
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
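(* The lemmas below bridge the primitive, non-compositional predicates
   [modifies_0] and [modifies_1], stated directly on [HS.mem], to the generic
   [modifies] clause, by discharging the region-, reference- and
   allocation-preservation obligations of the corresponding [MG] introduction
   lemmas one by one. *)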
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2)) | [] | LowStar.Monotonic.Buffer.modifies_1_modifies | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.modifies_1 b h1 h2)
(ensures LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_buffer b) h1 h2) | {
"end_col": 5,
"end_line": 1061,
"start_col": 2,
"start_line": 1034
} |
FStar.Pervasives.Lemma | val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls | val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))]
let loc_disjoint_addresses = | false | null | true | MG.loc_disjoint_addresses_intro #_ #cls | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.ModifiesGen.loc_disjoint_addresses_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
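(* Worked instance of the offset arithmetic, for illustration: with len = 10,
   an outer sub-range [i1, j1) = [2, 8) and, relative to it, an inner sub-range
   [i2, j2) = [1, 4), the lemma yields compatibility of rel2 on the absolute
   range [i1 + i2, i1 + j2) = [3, 6) of the original sequence: the sub-range of
   a sub-range is addressed by adding the inner offset to the outer one. *)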
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
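(* Representation note: a [Buffer max_length content idx length] is a view of
   the underlying reference [content] (a sequence of [max_length] elements
   governed by [rrel]), starting at offset [idx] and spanning [length] elements;
   [length] is erased and has no runtime footprint.  Sub-buffers are obtained by
   bumping [idx], without copying. *)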
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
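(* [buffer_compatible] is the semantic content of liveness for a non-null
   buffer: the sub-preorder [rel] must be compatible with the preorder [rrel]
   of the underlying reference on the window [idx, idx + length); [live h b]
   below conjoins it with containment of the reference in [h]. *)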
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
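(* An [ubuffer region addr] records only the arithmetic footprint of a buffer
   allocated at address [addr] of [region]: the total length of the underlying
   reference ([b_max_length]), the view's offset and length within it, and
   whether the reference is manually managed.  Inclusion and disjointness of
   two views can then be decided without knowing the element type. *)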
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
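(* Illustrative instance (these concrete numbers are not from the original
   development): two views of the same 10-element reference with
   (b_offset, b_length) = (0, 4) and (6, 3) satisfy [ubuffer_disjoint'],
   since 0 + 4 <= 6; views (0, 4) and (3, 3) do not; and a zero-length view
   is disjoint from everything. *)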
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_addresses
(preserve_liveness1 preserve_liveness2: bool)
(r1 r2: HS.rid)
(n1 n2: Set.set nat)
: Lemma
(requires (r1 <> r2 \/ Set.subset (Set.intersect n1 n2) Set.empty))
(ensures (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2)))
[SMTPat (loc_disjoint (loc_addresses preserve_liveness1 r1 n1) (loc_addresses preserve_liveness2 r2 n2))] | [] | LowStar.Monotonic.Buffer.loc_disjoint_addresses | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
preserve_liveness1: Prims.bool ->
preserve_liveness2: Prims.bool ->
r1: FStar.Monotonic.HyperHeap.rid ->
r2: FStar.Monotonic.HyperHeap.rid ->
n1: FStar.Set.set Prims.nat ->
n2: FStar.Set.set Prims.nat
-> FStar.Pervasives.Lemma
(requires r1 <> r2 \/ FStar.Set.subset (FStar.Set.intersect n1 n2) FStar.Set.empty)
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
n1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 n2))
[
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_addresses preserve_liveness1
r1
n1)
(LowStar.Monotonic.Buffer.loc_addresses preserve_liveness2 r2 n2))
] | {
"end_col": 68,
"end_line": 924,
"start_col": 29,
"start_line": 924
} |
Prims.GTot | val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) =
(not (g_is_null b)) /\
HS.is_mm (Buffer?.content b) /\
HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b | val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0
let freeable (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) = | false | null | false | (not (g_is_null b)) /\ HS.is_mm (Buffer?.content b) /\ HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\ Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperStack.is_mm",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content",
"FStar.Monotonic.HyperStack.is_heap_color",
"FStar.Monotonic.HyperHeap.color",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.op_GreaterThan",
"Prims.eq2",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__idx",
"FStar.UInt32.__uint_to_t",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__length"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
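(*
 * A rough reading of this property (an informal sketch, not the normative
 * statement from the fsti): (1) whenever two length-`len` sequences are
 * related by `rel`, their [i, j) slices are related by `sub_rel`; and
 * (2) whenever the [i, j) slice of a sequence is replaced, via
 * Seq.replace_subseq, by a `sub_rel`-related sequence, the result is
 * `rel`-related to the original. For instance, with the trivial preorder
 * `fun _ _ -> True` playing both roles, both directions hold trivially.
 * The aux0/aux1 steps of the transitivity proof below unfold exactly these
 * two directions.
 *)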
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
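(* For example (informal sketch): `mgsub rel b 0ul (len b)` keeps the same
   max_length, content, idx and length as `b` itself, which is why the
   reflexivity of the compatibility relation is the only fact needed here. *)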
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
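(* Concretely (an informal example; the numbers are chosen only for
   illustration): a non-null buffer with max_length = 16ul, idx = 4ul and
   length = 8ul is viewed by ubuffer_of_buffer' above as
   { b_max_length = 16; b_offset = 4; b_length = 8; b_is_mm = ... };
   the untyped view records only the footprint, not the element type. *)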
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b)
private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.mem_predicate
= fun h ->
buffer_compatible b ==>
p (as_seq h b)
let witnessed #_ #rrel #rel b p =
match b with
| Null -> p Seq.empty
| Buffer max_length content idx length ->
HST.token_p content (spred_as_mempred b p)
private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:spred a)
:Lemma (requires (Buffer? b /\ stable_on p rel))
(ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b)))
= let Buffer max_length content idx length = b in
let mp = spred_as_mempred b p in
let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1)
= Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1)
(fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b)))
in
Classical.forall_intro_2 aux
let witness_p #a #rrel #rel b p =
match b with
| Null -> ()
| Buffer _ content _ _ ->
lemma_stable_on_rel_is_stable_on_rrel b p;
//AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call
assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b));
HST.witness_p content (spred_as_mempred b p)
let recall_p #_ #_ #_ b p =
match b with
| Null -> ()
| Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p)
let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty)
| Buffer _ content _ _, _ ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j rel1);
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> ()
| Buffer _ content _ _, _ ->
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 | [] | LowStar.Monotonic.Buffer.freeable | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot Type0 | {
"end_col": 57,
"end_line": 1456,
"start_col": 2,
"start_line": 1451
} |
FStar.Pervasives.Lemma | val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2) | val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)]
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 = | false | null | true | loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.loc_disjoint_buffer",
"LowStar.Monotonic.Buffer.mgsub",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
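(*
 * Informal reading (illustration only): `compatible_sub_preorder len rel i j sub_rel`
 * asks for two things: any two `rel`-related sequences of length `len` have
 * `sub_rel`-related slices on [i, j), and replacing the [i, j) slice of a sequence
 * by a `sub_rel`-related one yields a `rel`-related sequence. For example, if `rel`
 * lets only positions >= 2 change, the slice [0, 2) is compatible with the preorder
 * that allows no change at all.
 *)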
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
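(*
 * These two lemmas are the glue for sub-buffers below: `gsub_zero_length` appeals to
 * reflexivity (a buffer is a sub-buffer of itself at offset 0), while `live_gsub` and
 * `gsub_gsub` appeal to transitivity (a sub-buffer of a sub-buffer is a sub-buffer of
 * the root, at the summed offset).
 *)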
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
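(* Illustration (hypothetical values): `Buffer 10ul r 2ul (Ghost.hide 3ul)` views
 * positions 2, 3 and 4 of the 10-element sequence stored in `r`; the refinement on
 * `length` is what keeps such a view inside the allocation. *)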
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
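(* Illustration (hypothetical values): `mgsub` only shifts the ghost view, it never
 * touches the underlying reference. A sub-buffer at offset 1 of a buffer with idx 2
 * has idx 3 and the requested length; `live_gsub` below shows the shifted view is
 * still compatible with the root preorder. *)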
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
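(* An `ubuffer_` keeps only the geometry of a buffer: its allocation size, its offset
 * and length within that allocation, and whether the allocation is manually managed.
 * Buffers of different element types at the same address can then be compared by this
 * geometry alone, which is all the generic modifies clause needs. *)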
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
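(* Illustration (hypothetical values): two views of the same 10-element allocation
 * covering [0, 4) and [4, 10) are disjoint, since 0 + 4 <= 4; a zero-length view is
 * disjoint from everything, hence the `True` branch above. *)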
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
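(* Informally: going from `h1` to `h2` may only change the contents of `b`. Regions
 * and references stay live, references at other addresses keep their values, no
 * address becomes unused again, and every untyped view disjoint from `b` (including
 * other slices of the same allocation) is preserved. *)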
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_disjoint_gsub_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel)
(i1:UInt32.t) (len1:UInt32.t) (sub_rel1:srel a)
(i2:UInt32.t) (len2:UInt32.t) (sub_rel2:srel a)
:Lemma (requires (UInt32.v i1 + UInt32.v len1 <= (length b) /\
UInt32.v i2 + UInt32.v len2 <= (length b) /\
(UInt32.v i1 + UInt32.v len1 <= UInt32.v i2 \/
UInt32.v i2 + UInt32.v len2 <= UInt32.v i1)))
(ensures (loc_disjoint (loc_buffer (mgsub sub_rel1 b i1 len1)) (loc_buffer (mgsub sub_rel2 b i2 len2))))
[SMTPat (mgsub sub_rel1 b i1 len1); SMTPat (mgsub sub_rel2 b i2 len2)] | [] | LowStar.Monotonic.Buffer.loc_disjoint_gsub_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i1: FStar.UInt32.t ->
len1: FStar.UInt32.t ->
sub_rel1: LowStar.Monotonic.Buffer.srel a ->
i2: FStar.UInt32.t ->
len2: FStar.UInt32.t ->
sub_rel2: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i1 + FStar.UInt32.v len1 <= LowStar.Monotonic.Buffer.length b /\
FStar.UInt32.v i2 + FStar.UInt32.v len2 <= LowStar.Monotonic.Buffer.length b /\
(FStar.UInt32.v i1 + FStar.UInt32.v len1 <= FStar.UInt32.v i2 \/
FStar.UInt32.v i2 + FStar.UInt32.v len2 <= FStar.UInt32.v i1))
(ensures
LowStar.Monotonic.Buffer.loc_disjoint (LowStar.Monotonic.Buffer.loc_buffer (LowStar.Monotonic.Buffer.mgsub
sub_rel1
b
i1
len1))
(LowStar.Monotonic.Buffer.loc_buffer (LowStar.Monotonic.Buffer.mgsub sub_rel2 b i2 len2)))
[
SMTPat (LowStar.Monotonic.Buffer.mgsub sub_rel1 b i1 len1);
SMTPat (LowStar.Monotonic.Buffer.mgsub sub_rel2 b i2 len2)
] | {
"end_col": 75,
"end_line": 917,
"start_col": 2,
"start_line": 917
} |
FStar.Pervasives.Lemma | val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else () | val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2))
let empty_disjoint #t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2 = | false | null | true | let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2
then MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.op_AmpAmp",
"Prims.op_Equality",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.ModifiesGen.loc_disjoint_aloc_intro",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"Prims.bool",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
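(* Illustration (hypothetical values): for idx = 2 and len = 3 over a 10-element
 * reference, `as_seq h b` is the slice at positions 2 to 4 of the stored sequence,
 * so its length is `U32.v len`, as `length_as_seq` below records. *)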
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
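(* Variant of [modifies_1] restricted to the [from, to) range of [b]; when the
   range is degenerate (see [ubuffer_of_buffer_from_to_none_cond]) it collapses
   to [modifies_0]. *)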
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
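(* Package the ubuffer relations and their lemmas (reflexivity, transitivity,
   symmetry, and preservation under same-mreference updates) as an abstract
   location class for FStar.ModifiesGen; [loc], [loc_union], [modifies] and the
   related lemmas below are then obtained from the generic construction. *)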
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
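(* The next lemma strengthens [modifies (loc_union l (loc_buffer b))] into
   [modifies (loc_union l (loc_buffer_from_to b from to))] whenever the prefix
   [0, from) and the suffix [to, length b) of [b] are unchanged between the two
   heaps; the proof goes through [MG.modifies_strengthen] with a pointwise
   argument on the untyped views of [b]. *)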
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val empty_disjoint (#t1 #t2: Type) (#rrel1 #rel1: srel t1) (#rrel2 #rel2: srel t2) (b1: mbuffer t1 rrel1 rel1) (b2: mbuffer t2 rrel2 rel2) : Lemma
(requires (length b1 == 0))
(ensures (disjoint b1 b2)) | [] | LowStar.Monotonic.Buffer.empty_disjoint | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer t1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer t2 rrel2 rel2
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.length b1 == 0)
(ensures LowStar.Monotonic.Buffer.disjoint b1 b2) | {
"end_col": 9,
"end_line": 1268,
"start_col": 1,
"start_line": 1264
} |
FStar.HyperStack.ST.Stack | val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b) | val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b))
let recall #_ #_ #_ b = | true | null | false | if Null? b then () else HST.recall (Buffer?.content b) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.uu___is_Null",
"Prims.unit",
"Prims.bool",
"FStar.HyperStack.ST.recall",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
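(* A monotonic buffer is either null or a view into an underlying monotonic
   reference holding a sequence of [max_length] elements: [idx] is where the
   view starts and the erased [length] is its extent, with
   [idx + length <= max_length]; [rrel] is the preorder of the underlying
   reference and [rel] the preorder exposed on the view. *)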
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
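(* [ubuffer_preserved b h h'] states that every typed buffer at the same
   region and address keeps its liveness from [h] to [h'], and that the slice
   of it covered by the untyped view [b] has the same contents in both heaps. *)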
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
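(* Untyped view of the [from, to) sub-range of a buffer; the view is the
   degenerate empty one when the buffer is null, [to < from], or [from]
   exceeds the length of the buffer. *)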
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
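(* [cls] below packages the untyped-buffer theory defined above (inclusion,
   disjointness, preservation, and their lemmas) as a FStar.ModifiesGen class;
   the generic [loc] and [modifies] combinators that follow are all obtained by
   instantiating MG with it. *)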
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
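(* The two definitions above map a buffer to its footprint: a null buffer (or an
   empty range) contributes no locations; otherwise the footprint is the abstract
   location of the buffer's untyped view, keyed by [frameOf b] and [as_addr b]. *)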
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
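(* [modifies_0_modifies] absorbs the primitive [modifies_0] clause into the
   generic one with an empty location set: MG.modifies_none_intro only needs the
   region-, reference- and address-preservation facts established above. *)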
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
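(* In [modifies_1_modifies] above, MG.modifies_intro takes one callback per proof
   obligation: live regions stay live, references outside the footprint keep their
   values, liveness is preserved, unused addresses stay unused, and disjoint
   untyped buffers are preserved. The ranged variant below follows the same recipe. *)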
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
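(* Together with the primed lemma above, [modifies_loc_buffer_from_to_intro] lets
   a caller shrink a write footprint from [loc_buffer b] to
   [loc_buffer_from_to b from to] whenever the prefix [0, from) and the suffix
   [to, length b) of [b] are left unchanged. *)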
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
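(* [index b i] dereferences the single underlying reference and selects offset
   [idx + i] of the full [max_length]-element sequence. *)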
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
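(* [g_upd_seq] is the ghost counterpart of a bulk write: the new contents [s] are
   spliced into the underlying sequence at [idx, idx + length); an empty [s]
   leaves the heap unchanged. *)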
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
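(* [upd'] replaces the whole underlying sequence: the slice starting at [idx] is
   updated at position [i] and spliced back, and the [assert] restates the result
   as an update of the [idx, idx + len) view of [b]. *)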
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recall (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:HST.Stack unit (requires (fun m -> recallable b \/ (region_lifetime_buf b /\ HS.live_region m (frameOf b))))
(ensures (fun m0 _ m1 -> m0 == m1 /\ live m1 b)) | [] | LowStar.Monotonic.Buffer.recall | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 78,
"end_line": 1397,
"start_col": 24,
"start_line": 1397
} |
FStar.HyperStack.ST.ST | val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mgcmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) false | val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)))
let mgcmalloc #_ #_ r init len = | true | null | false | alloc_heap_common r len (Seq.create (U32.v len) init) false | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.alloc_heap_common",
"FStar.Seq.Base.create",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.lmbuffer",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.recallable"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add extra asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
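(* Roughly, [compatible_sub_preorder len rel i j sub_rel] says that [rel]-related
   full sequences restrict to [sub_rel]-related slices on [i, j), and that
   splicing a [sub_rel]-related slice back into a full sequence yields a
   [rel]-related full sequence. *)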
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
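(* A live [Buffer] is a view on the element range [idx, idx + length) of a single
   underlying reference holding [max_length] elements; for example,
   [max_length = 10ul, idx = 2ul, length = 3ul] views elements 2, 3 and 4.
   [Null] is the unique null pointer. *)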
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
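(*
 * Editor's note: the class instance below packages the ubuffer relations defined
 * above (inclusion, disjointness, heap-to-heap preservation) together with their
 * reflexivity, transitivity, symmetry and framing lemmas into the abstract
 * location class of FStar.ModifiesGen; the loc/modifies theory that follows is
 * obtained by instantiating MG with this class.
 *)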
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
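(*
 * loc_buffer and loc_buffer_from_to map a buffer (or a [from, to) range of it)
 * to an abstract ModifiesGen location; null buffers and empty or ill-formed
 * ranges degenerate to loc_none.
 *)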
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
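(*
 * The lemmas below connect the primitive, non-compositional predicates
 * modifies_0, modifies_1, modifies_1_from_to and modifies_addr_of to the
 * generic modifies clause, via MG.modifies_none_intro, MG.modifies_intro and
 * MG.modifies_address_intro, using the per-region, per-reference and
 * per-ubuffer preservation facts established above.
 *)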
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
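(*
 * The next lemma shrinks a modifies footprint: if only the [from, to) range of
 * b actually changed between h and h' (its prefix and suffix are unchanged),
 * then modifies (loc_union l (loc_buffer b)) can be strengthened to
 * modifies (loc_union l (loc_buffer_from_to b from to)).
 *)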
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
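(*
 * Note: the underlying reference stores the full max_length-element sequence
 * governed by rrel, so the stateful update is phrased as replacing the suffix
 * starting at idx; the assert above reconciles this with the specification-level
 * update of as_seq h b at index i.
 *)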
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
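(*
 * A non-null recallable buffer lives in an eternal region, is not manually
 * managed and remains compatible, which is what the stateful recall below
 * relies on to re-establish liveness; for the null buffer recall is a no-op.
 *)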
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b)
private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.mem_predicate
= fun h ->
buffer_compatible b ==>
p (as_seq h b)
let witnessed #_ #rrel #rel b p =
match b with
| Null -> p Seq.empty
| Buffer max_length content idx length ->
HST.token_p content (spred_as_mempred b p)
private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:spred a)
:Lemma (requires (Buffer? b /\ stable_on p rel))
(ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b)))
= let Buffer max_length content idx length = b in
let mp = spred_as_mempred b p in
let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1)
= Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1)
(fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b)))
in
Classical.forall_intro_2 aux
let witness_p #a #rrel #rel b p =
match b with
| Null -> ()
| Buffer _ content _ _ ->
lemma_stable_on_rel_is_stable_on_rrel b p;
//AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call
assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b));
HST.witness_p content (spred_as_mempred b p)
let recall_p #_ #_ #_ b p =
match b with
| Null -> ()
| Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p)
let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty)
| Buffer _ content _ _, _ ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j rel1);
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> ()
| Buffer _ content _ _, _ ->
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) =
(not (g_is_null b)) /\
HS.is_mm (Buffer?.content b) /\
HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b
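(*
 * freeable requires a manually-managed, non-null buffer that covers its whole
 * allocation unit (idx 0 and length equal to max_length), so free can simply
 * release the underlying reference with rfree.
 *)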
let free #_ #_ #_ b = HST.rfree (Buffer?.content b)
let freeable_length #_ #_ #_ b = ()
let freeable_disjoint #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
private let alloc_heap_common (#a:Type0) (#rrel:srel a)
(r:HST.erid) (len:U32.t{U32.v len > 0}) (s:Seq.seq a{Seq.length s == U32.v len})
(mm:bool)
:HST.ST (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> True))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 s /\
frameOf b == r /\
HS.is_mm (Buffer?.content b) == mm /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b))
= lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
if mm then HST.ralloc_mm r s else HST.ralloc r s
in
let b = Buffer len content 0ul (Ghost.hide len) in
b | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mgcmalloc (#a:Type0) (#rrel:srel a)
(r:HS.rid) (init:a) (len:U32.t)
:HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == r /\ recallable b})
(requires (fun _ -> malloc_pre r len))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))) | [] | LowStar.Monotonic.Buffer.mgcmalloc | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Monotonic.HyperHeap.rid -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer a rrel rrel (FStar.UInt32.v len)
{LowStar.Monotonic.Buffer.frameOf b == r /\ LowStar.Monotonic.Buffer.recallable b}) | {
"end_col": 61,
"end_line": 1484,
"start_col": 2,
"start_line": 1484
} |
Prims.Tot | val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b}) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mnull #_ #_ #_ = Null | val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b})
let mnull #_ #_ #_ = | false | null | false | Null | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.Null",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
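(*
 * Intuition (illustrative note, not part of the original proof): with the
 * trivial preorder used by LowStar.Buffer, i.e. fun _ _ -> True as both rel
 * and sub_rel, every sub-range is compatible, since the implications making up
 * compatible_subseq_preorder then have trivially true conclusions.
 *)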
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mnull (#a:Type0) (#rrel #rel:srel a) :Tot (b:mbuffer a rrel rel {g_is_null b}) | [] | LowStar.Monotonic.Buffer.mnull | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel {LowStar.Monotonic.Buffer.g_is_null b} | {
"end_col": 25,
"end_line": 96,
"start_col": 21,
"start_line": 96
} |
FStar.Pervasives.Lemma | val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]] | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel | val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]]
let recallable_mgsub #_ #rrel #rel b i len sub_rel = | false | null | true | match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len)
rrel
(U32.v idx)
(U32.v idx + U32.v length)
rel
(U32.v i)
(U32.v i + U32.v len)
sub_rel | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatibility_is_transitive",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
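(*
 * Representation: a buffer is either null or a window [idx, idx + length)
 * into a single mreference holding a sequence of max_length elements.
 * rrel governs the full underlying sequence, while rel is the preorder
 * advertised for the window; buffer_compatible below ties the two together.
 *)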
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
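(*
 * Taking a sub-buffer only shifts the index and length; the underlying
 * reference is shared, which is why frameOf and as_addr are preserved
 * (lemmas below) and why overlaps can be decided purely on offsets.
 *)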
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
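(*
 * The ubuffer view erases the element type and the preorders, keeping only the
 * region and address (as indices) plus the offset, length, maximal length and
 * manual-management flag needed to decide inclusion and disjointness of
 * buffers that may have different types.
 *)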
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
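(* Disjointness is interval disjointness of the two windows within the same
 * allocation unit; zero-length ubuffers are disjoint from everything. *)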
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
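(* A minimal usage sketch (hypothetical specification; fill is not a name from this file):
client code is meant to write its footprints against the compositional clause assembled
further below, e.g.

val fill (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (v:a)
: HST.Stack unit
(requires fun h -> live h b)
(ensures fun h0 _ h1 -> live h1 b /\ modifies (loc_buffer b) h0 h1)

whereas the modifies_0 / modifies_1 predicates defined here only serve to establish that
clause (see modifies_0_modifies and modifies_1_modifies below). *)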
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
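(* Range-restricted variant: modifies_1_from_to b from to h1 h2 allows only the sub-range
[from, to) of b to change; when b is null or the requested range is trivially empty or
out of bounds, it degenerates to modifies_0. *)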
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
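(* The untyped-buffer theory above (preservation, inclusion, disjointness) is packaged as
a FStar.ModifiesGen class; the generic loc / modifies combinators and their lemmas below
are obtained by instantiating MG with this class. *)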
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
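(* If the contents of b outside [from, to) are unchanged between h and h', a clause
modifies (loc_union l (loc_buffer b)) h h' can be strengthened to
modifies (loc_union l (loc_buffer_from_to b from to)) h h'. *)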
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
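(* g_upd_seq b s h: ghost heap update that replaces the contents of b (the slice
[idx, idx + length) of the underlying reference) with s; the heap is returned unchanged
when s is empty. *)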
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
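(* recallable b: b is either null, or allocated in an eternal region, not manually
managed, and compatible with its preorder, so its liveness can later be recalled. *)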
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val recallable_mgsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel /\ recallable b))
(ensures (recallable (mgsub sub_rel b i len)))
[SMTPatOr [
[SMTPat (recallable (mgsub sub_rel b i len))];
[SMTPat (recallable b); SMTPat (mgsub sub_rel b i len);]
]] | [] | LowStar.Monotonic.Buffer.recallable_mgsub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i + FStar.UInt32.v len <= LowStar.Monotonic.Buffer.length b /\
LowStar.Monotonic.Buffer.compatible_sub b i len sub_rel /\
LowStar.Monotonic.Buffer.recallable b)
(ensures LowStar.Monotonic.Buffer.recallable (LowStar.Monotonic.Buffer.mgsub sub_rel b i len))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.recallable (LowStar.Monotonic.Buffer.mgsub sub_rel
b
i
len))
];
[
SMTPat (LowStar.Monotonic.Buffer.recallable b);
SMTPat (LowStar.Monotonic.Buffer.mgsub sub_rel b i len)
]
]
] | {
"end_col": 65,
"end_line": 1388,
"start_col": 2,
"start_line": 1383
} |
Prims.GTot | val buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel | val buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 = | false | null | false | match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel (U32.v idx) (U32.v idx + U32.v length) rel | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_True",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.compatible_sub_preorder"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 | [] | LowStar.Monotonic.Buffer.buffer_compatible | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer t rrel rel -> Prims.GTot Type0 | {
"end_col": 50,
"end_line": 110,
"start_col": 2,
"start_line": 106
} |
Prims.Pure | val coerce (t2 #t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let coerce (t2: Type) (#t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1)) = x1 | val coerce (t2 #t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1))
let coerce (t2 #t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1)) = | false | null | false | x1 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"Prims.eq2"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
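(* Inclusion and disjointness of untyped views: both are essentially interval
   conditions on (offset, length) within the same allocation unit; disjointness
   holds trivially when either view is empty. *)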
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
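(* The ubuffer operations above are packaged into an FStar.ModifiesGen class
   instance; the generic loc / loc_includes / loc_disjoint / modifies theory that
   follows is obtained by instantiating that framework. *)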
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
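(* Usage sketch (not part of the original file; `copy`, `buf` and `u8` are
   illustrative names only): a client specification built from these combinators
   typically looks like

     val copy (dst src:buf u8) : HST.Stack unit
       (requires fun h -> live h dst /\ live h src /\ length dst == length src /\
                       loc_disjoint (loc_buffer dst) (loc_buffer src))
       (ensures  fun h0 _ h1 -> modifies (loc_buffer dst) h0 h1 /\
                             as_seq h1 dst == as_seq h0 src)

   i.e. liveness, length and disjointness facts in the precondition, and a modifies
   footprint plus a functional description of the result in the postcondition. *)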
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
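(* modifies_loc_buffer_from_to_intro' strengthens a footprint: if a step modifies
   l `loc_union` loc_buffer b but provably leaves the prefix [0, from) and the
   suffix [to, length b) of b unchanged, then it in fact only modifies
   l `loc_union` loc_buffer_from_to b from to. *)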
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
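(* Recallability and witnessed predicates: a buffer is recallable when it lives in
   an eternal region, is not manually managed and is compatible with its allocation
   unit; stable predicates on its contents are lifted to heap predicates on the
   underlying reference (spred_as_mempred) so that HyperStack's witness_p / recall_p
   tokens can be reused. *)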
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b)
private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.mem_predicate
= fun h ->
buffer_compatible b ==>
p (as_seq h b)
let witnessed #_ #rrel #rel b p =
match b with
| Null -> p Seq.empty
| Buffer max_length content idx length ->
HST.token_p content (spred_as_mempred b p)
private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:spred a)
:Lemma (requires (Buffer? b /\ stable_on p rel))
(ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b)))
= let Buffer max_length content idx length = b in
let mp = spred_as_mempred b p in
let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1)
= Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1)
(fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b)))
in
Classical.forall_intro_2 aux
let witness_p #a #rrel #rel b p =
match b with
| Null -> ()
| Buffer _ content _ _ ->
lemma_stable_on_rel_is_stable_on_rrel b p;
//AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call
assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b));
HST.witness_p content (spred_as_mempred b p)
let recall_p #_ #_ #_ b p =
match b with
| Null -> ()
| Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p)
let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty)
| Buffer _ content _ _, _ ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j rel1);
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> ()
| Buffer _ content _ _, _ ->
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) =
(not (g_is_null b)) /\
HS.is_mm (Buffer?.content b) /\
HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b
let free #_ #_ #_ b = HST.rfree (Buffer?.content b)
let freeable_length #_ #_ #_ b = ()
let freeable_disjoint #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
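(* alloc_heap_common is the shared heap allocator: it allocates a fresh full-length
   reference (GC'd or manually managed) in the given region and wraps it as a buffer
   starting at index 0. The concrete allocators below (mgcmalloc, mmalloc, malloca,
   the *_of_list and mmalloc_drgn variants, and the *_and_blit versions) either
   specialize it or follow the same pattern on the stack or in a dynamic region. *)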
private let alloc_heap_common (#a:Type0) (#rrel:srel a)
(r:HST.erid) (len:U32.t{U32.v len > 0}) (s:Seq.seq a{Seq.length s == U32.v len})
(mm:bool)
:HST.ST (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> True))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 s /\
frameOf b == r /\
HS.is_mm (Buffer?.content b) == mm /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b))
= lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
if mm then HST.ralloc_mm r s else HST.ralloc r s
in
let b = Buffer len content 0ul (Ghost.hide len) in
b
let mgcmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) false
private let read_sub_buffer (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (idx len:U32.t)
: HST.ST (Seq.seq a)
(requires fun h0 ->
live h0 b /\ U32.v len > 0 /\
U32.v idx + U32.v len <= length b)
(ensures fun h0 s h1 ->
h0 == h1 /\
s == Seq.slice (as_seq h0 b) (U32.v idx) (U32.v idx + U32.v len))
= let open HST in
let s = ! (Buffer?.content b) in //the whole allocation unit
let s = Seq.slice s (U32.v (Buffer?.idx b))
(U32.v (Buffer?.max_length b)) in //b buffer
Seq.slice s (U32.v idx) (U32.v idx + U32.v len) //slice of b
let mgcmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) false
let mmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) true
let mmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) true
let malloca #a #rrel init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (Seq.create (U32.v len) init)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_and_blit #a #rrel #_ #_ src id_src len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (read_sub_buffer src id_src len)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_of_list #a #rrel init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc s
in
Buffer len content 0ul (Ghost.hide len)
let mgcmalloc_of_list #a #rrel r init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc r s
in
Buffer len content 0ul (Ghost.hide len)
let mmalloc_drgn #a #rrel d init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len
let mmalloc_drgn_mm #a #rrel d init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn_mm d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len
let mmalloc_drgn_and_blit #a #rrel #_ #_ d src id_src len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (read_sub_buffer src id_src len)
in
Buffer len content 0ul len
#push-options "--max_fuel 0 --initial_ifuel 1 --max_ifuel 1 --z3rlimit 64"
let blit #a #rrel1 #rrel2 #rel1 #rel2 src idx_src dst idx_dst len =
let open HST in
match src, dst with
| Buffer _ _ _ _, Buffer _ _ _ _ ->
if len = 0ul then ()
else
let h = get () in
let Buffer max_length1 content1 idx1 length1 = src in
let Buffer max_length2 content2 idx2 length2 = dst in
let s_full1 = !content1 in
let s_full2 = !content2 in
let s1 = Seq.slice s_full1 (U32.v idx1) (U32.v max_length1) in
let s2 = Seq.slice s_full2 (U32.v idx2) (U32.v max_length2) in
let s_sub_src = Seq.slice s1 (U32.v idx_src) (U32.v idx_src + U32.v len) in
let s2' = Seq.replace_subseq s2 (U32.v idx_dst) (U32.v idx_dst + U32.v len) s_sub_src in
let s_full2' = Seq.replace_subseq s_full2 (U32.v idx2) (U32.v max_length2) s2' in
assert (Seq.equal (Seq.slice s2' (U32.v idx_dst) (U32.v idx_dst + U32.v len)) s_sub_src);
assert (Seq.equal (Seq.slice s2' 0 (U32.v idx_dst)) (Seq.slice s2 0 (U32.v idx_dst)));
assert (Seq.equal (Seq.slice s2' (U32.v idx_dst + U32.v len) (length dst))
(Seq.slice s2 (U32.v idx_dst + U32.v len) (length dst)));
// AF: Needed to trigger the preorder relation. A bit verbose because the second sequence
// has a ghost computation (U32.v (Ghost.reveal length))
assert (s_full2' `Seq.equal`
Seq.replace_subseq s_full2
(U32.v idx2)
(U32.v idx2 + U32.v length2)
(Seq.replace_subseq (as_seq h dst)
(U32.v idx_dst)
(U32.v idx_dst + U32.v len)
(Seq.slice (as_seq h src)
(U32.v idx_src)
(U32.v idx_src + U32.v len)
)
)
);
content2 := s_full2';
let h1 = get () in
assert (s_full2' `Seq.equal` Seq.replace_subseq s_full2 (U32.v idx2) (U32.v idx2 + U32.v length2) (Seq.slice s2' 0 (U32.v length2)));
assert (h1 == g_upd_seq dst (Seq.slice s2' 0 (U32.v length2)) h);
g_upd_seq_as_seq dst (Seq.slice s2' 0 (U32.v length2)) h //for modifies clause
| _, _ -> ()
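(* The blit proof works on whole allocation units: it reads both underlying
   sequences, splices the copied slice into the destination with replace_subseq,
   establishes the equalities needed to trigger the destination's preorder, and
   finally uses g_upd_seq_as_seq to obtain the modifies (loc_buffer dst) clause. *)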
#push-options "--z3rlimit 128 --max_fuel 0 --max_ifuel 1 --initial_ifuel 1 --z3cliopt smt.qi.EAGER_THRESHOLD=4"
let fill' (#t:Type) (#rrel #rel: srel t)
(b: mbuffer t rrel rel)
(z:t)
(len:U32.t)
: HST.Stack unit
(requires (fun h ->
live h b /\
U32.v len <= length b /\
rel (as_seq h b) (Seq.replace_subseq (as_seq h b) 0 (U32.v len) (Seq.create (U32.v len) z))
))
(ensures (fun h0 _ h1 ->
modifies (loc_buffer b) h0 h1 /\
live h1 b /\
Seq.slice (as_seq h1 b) 0 (U32.v len) `Seq.equal` Seq.create (U32.v len) z /\
Seq.slice (as_seq h1 b) (U32.v len) (length b) `Seq.equal` Seq.slice (as_seq h0 b) (U32.v len) (length b)
))
= let open HST in
if len = 0ul then ()
else begin
let h = get () in
let Buffer max_length content idx length = b in
let s_full = !content in
let s = Seq.slice s_full (U32.v idx) (U32.v max_length) in
let s_src = Seq.create (U32.v len) z in
let s' = Seq.replace_subseq s 0 (U32.v len) s_src in
let s_full' = Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v len) s_src in
// AF: Needed to trigger the preorder relation. A bit verbose because the second sequence
// has a ghost computation (U32.v (Ghost.reveal length))
assert (s_full' `Seq.equal` Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v length) (Seq.replace_subseq (Seq.slice s_full (U32.v idx) (U32.v idx + U32.v length)) 0 (U32.v len) s_src));
content := s_full';
let h' = HST.get () in
assert (s_full' `Seq.equal` Seq.replace_subseq s_full (U32.v idx) (U32.v idx + U32.v length) (Seq.slice s' 0 (U32.v length)));
assert (h' == g_upd_seq b (Seq.slice s' 0 (U32.v length)) h);
g_upd_seq_as_seq b (Seq.slice s' 0 (U32.v length)) h //for modifies clause
end
#pop-options
let fill #t #rrel #rel b z len = fill' b z len
let abuffer' = ubuffer' | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 64,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val coerce (t2 #t1: Type) (x1: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun y -> y == x1)) | [] | LowStar.Monotonic.Buffer.coerce | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | t2: Type -> x1: t1 -> Prims.Pure t2 | {
"end_col": 108,
"end_line": 1652,
"start_col": 106,
"start_line": 1652
} |
FStar.Pervasives.Lemma | val lemma_seq_sub_compatilibity_is_reflexive (#a: Type0) (len: nat) (rel: srel a)
: Lemma (compatible_sub_preorder len rel 0 len rel) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2) | val lemma_seq_sub_compatilibity_is_reflexive (#a: Type0) (len: nat) (rel: srel a)
: Lemma (compatible_sub_preorder len rel 0 len rel)
let lemma_seq_sub_compatilibity_is_reflexive (#a: Type0) (len: nat) (rel: srel a)
: Lemma (compatible_sub_preorder len rel 0 len rel) = | false | null | true | assert (forall (s1: Seq.seq a) (s2: Seq.seq a).
Seq.length s1 == Seq.length s2 ==> Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"Prims.nat",
"LowStar.Monotonic.Buffer.srel",
"Prims._assert",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.eq2",
"FStar.Seq.Base.length",
"FStar.Seq.Base.equal",
"FStar.Seq.Properties.replace_subseq",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"LowStar.Monotonic.Buffer.compatible_sub_preorder",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_seq_sub_compatilibity_is_reflexive (#a: Type0) (len: nat) (rel: srel a)
: Lemma (compatible_sub_preorder len rel 0 len rel) | [] | LowStar.Monotonic.Buffer.lemma_seq_sub_compatilibity_is_reflexive | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | len: Prims.nat -> rel: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(ensures LowStar.Monotonic.Buffer.compatible_sub_preorder len rel 0 len rel) | {
"end_col": 91,
"end_line": 44,
"start_col": 4,
"start_line": 43
} |
Prims.GTot | val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h | val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0
let unused_in #_ #_ #_ b h = | false | null | false | match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_False",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"FStar.Monotonic.HyperStack.unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
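(*
 * Reading of the representation: a non-null buffer is a window into a single
 * heap reference.  [content] always holds [max_length] elements; the buffer
 * exposes the [length] elements starting at [idx], and the refinement on
 * [length] keeps that window inside the allocation.  [Null] is the null pointer.
 *)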
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unused_in (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem) :GTot Type0 | [] | LowStar.Monotonic.Buffer.unused_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> h: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 52,
"end_line": 103,
"start_col": 2,
"start_line": 101
} |
FStar.HyperStack.ST.ST | val mmalloc_drgn (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mmalloc_drgn #a #rrel d init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len | val mmalloc_drgn (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init))
let mmalloc_drgn #a #rrel d init len = | true | null | false | lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content:HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.HyperStack.ST.drgn",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.Buffer",
"FStar.UInt32.__uint_to_t",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.HyperStack.ST.rid_of_drgn",
"LowStar.Monotonic.Buffer.region_lifetime_buf",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.HyperStack.ST.ralloc_drgn",
"FStar.Seq.Base.create",
"Prims.unit",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatilibity_is_reflexive"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
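(*
 * Note that mgsub does not copy: the sub-buffer shares [content] and only
 * shifts the window.  As a sketch (with illustrative arguments, not taken from
 * this file): for a buffer [b] of length 8 and a sub-preorder [sub_rel],
 *   mgsub sub_rel b 2ul 4ul
 * views elements 2..5 of [b], so any update visible through the sub-buffer is
 * an update of the same underlying reference.
 *)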
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
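(*
 * Informally: ubuffer_preserved b h h' says that, at this region and address,
 * every typed buffer b' stays live from h to h', and whenever b' covers the
 * window recorded in b, the contents of that window (as a slice of as_seq b')
 * are unchanged.  This is the preservation shape required by the generic
 * modifies theory instantiated further below.
 *)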
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
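(*
 * Concretely (illustrative numbers): within one 8-element allocation, the
 * windows [0, 4) and [4, 8) are ubuffer_disjoint, while [0, 5) and [4, 8) are
 * not; a zero-length window is disjoint from everything.
 *)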
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
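(*
 * Informal summary: modifies_1 b h1 h2 allows only the footprint of b to
 * change between h1 and h2.  Every reference at another address keeps its
 * value, no liveness is lost, no address becomes unused again, and every
 * ubuffer disjoint from b is preserved.  Roughly, together with disjointness,
 * this is what lets clients conclude that an unrelated buffer kept its
 * contents (see modifies_buffer_elim below).
 *)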
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
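(*
 * From here on, the generic theory of FStar.ModifiesGen is instantiated with
 * ubuffer as the type of abstract locations: loc, loc_includes, loc_disjoint
 * and modifies, and most of the lemmas that follow, are direct re-exports of
 * MG specialized to this class.
 *)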
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
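(*
 * The footprint of a buffer is the abstract location of its untyped view;
 * the null pointer has an empty footprint, and loc_buffer_from_to above
 * restricts the footprint to the [from, to) window.
 *)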
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b)
private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.mem_predicate
= fun h ->
buffer_compatible b ==>
p (as_seq h b)
let witnessed #_ #rrel #rel b p =
match b with
| Null -> p Seq.empty
| Buffer max_length content idx length ->
HST.token_p content (spred_as_mempred b p)
private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:spred a)
:Lemma (requires (Buffer? b /\ stable_on p rel))
(ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b)))
= let Buffer max_length content idx length = b in
let mp = spred_as_mempred b p in
let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1)
= Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1)
(fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b)))
in
Classical.forall_intro_2 aux
let witness_p #a #rrel #rel b p =
match b with
| Null -> ()
| Buffer _ content _ _ ->
lemma_stable_on_rel_is_stable_on_rrel b p;
//AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call
assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b));
HST.witness_p content (spred_as_mempred b p)
let recall_p #_ #_ #_ b p =
match b with
| Null -> ()
| Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p)
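(* Usage sketch (hypothetical client code, not part of this module): once a
   predicate p has been shown stable_on p rel, a client can tie it to a live
   buffer and recover it later without re-proving it, roughly:

     witness_p b p;    (* in some state h0 where p (as_seq h0 b) holds *)
     ...               (* arbitrary well-typed updates respecting rel *)
     recall_p b p      (* p (as_seq h1 b) holds again in the current state h1 *)

   The heap names h0/h1 are only illustrative; the exact pre/postconditions
   are those of witness_p and recall_p in the interface. *)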
let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty)
| Buffer _ content _ _, _ ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j rel1);
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> ()
| Buffer _ content _ _, _ ->
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) =
(not (g_is_null b)) /\
HS.is_mm (Buffer?.content b) /\
HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b
let free #_ #_ #_ b = HST.rfree (Buffer?.content b)
let freeable_length #_ #_ #_ b = ()
let freeable_disjoint #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
private let alloc_heap_common (#a:Type0) (#rrel:srel a)
(r:HST.erid) (len:U32.t{U32.v len > 0}) (s:Seq.seq a{Seq.length s == U32.v len})
(mm:bool)
:HST.ST (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> True))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 s /\
frameOf b == r /\
HS.is_mm (Buffer?.content b) == mm /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b))
= lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
if mm then HST.ralloc_mm r s else HST.ralloc r s
in
let b = Buffer len content 0ul (Ghost.hide len) in
b
let mgcmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) false
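(* Allocation sketch (assumptions: an eternal region r : HST.erid and a
   strictly positive length; hypothetical client code, normally reached via
   the wrappers in LowStar.Buffer rather than by calling this directly):

     let b = mgcmalloc r 0uy 8ul in ...

   Afterwards alloc_post_mem_common is expected to give
   as_seq h1 b == Seq.create 8 0uy, with the buffer allocated in region r and
   not manually managed. Treat the call above only as an illustration of the
   shape of the API defined here. *)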
private let read_sub_buffer (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (idx len:U32.t)
: HST.ST (Seq.seq a)
(requires fun h0 ->
live h0 b /\ U32.v len > 0 /\
U32.v idx + U32.v len <= length b)
(ensures fun h0 s h1 ->
h0 == h1 /\
s == Seq.slice (as_seq h0 b) (U32.v idx) (U32.v idx + U32.v len))
= let open HST in
let s = ! (Buffer?.content b) in //the whole allocation unit
let s = Seq.slice s (U32.v (Buffer?.idx b))
(U32.v (Buffer?.max_length b)) in //b buffer
Seq.slice s (U32.v idx) (U32.v idx + U32.v len) //slice of b
let mgcmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) false
let mmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) true
let mmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) true
let malloca #a #rrel init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (Seq.create (U32.v len) init)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_and_blit #a #rrel #_ #_ src id_src len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (read_sub_buffer src id_src len)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_of_list #a #rrel init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc s
in
Buffer len content 0ul (Ghost.hide len)
let mgcmalloc_of_list #a #rrel r init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc r s
in
Buffer len content 0ul (Ghost.hide len) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mmalloc_drgn (#a:Type0) (#rrel:srel a)
(d:HST.drgn) (init:a) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h -> alloc_drgn_pre h d len)
(ensures fun h0 b h1 -> alloc_post_mem_common b h0 h1 (Seq.create (U32.v len) init)) | [] | LowStar.Monotonic.Buffer.mmalloc_drgn | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | d: FStar.HyperStack.ST.drgn -> init: a -> len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer a rrel rrel (FStar.UInt32.v len)
{ LowStar.Monotonic.Buffer.frameOf b == FStar.HyperStack.ST.rid_of_drgn d /\
LowStar.Monotonic.Buffer.region_lifetime_buf b }) | {
"end_col": 28,
"end_line": 1547,
"start_col": 2,
"start_line": 1543
} |
FStar.HyperStack.ST.ST | val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a)
(d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h ->
alloc_drgn_pre h d len /\
live h src /\
U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len))) | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mmalloc_drgn_and_blit #a #rrel #_ #_ d src id_src len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (read_sub_buffer src id_src len)
in
Buffer len content 0ul len | val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a)
(d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h ->
alloc_drgn_pre h d len /\
live h src /\
U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len)))
let mmalloc_drgn_and_blit #a #rrel #_ #_ d src id_src len = | true | null | false | lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content:HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (read_sub_buffer src id_src len)
in
Buffer len content 0ul len | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.HyperStack.ST.drgn",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.Buffer",
"FStar.UInt32.__uint_to_t",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.lmbuffer",
"FStar.UInt32.v",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.HyperStack.ST.rid_of_drgn",
"LowStar.Monotonic.Buffer.region_lifetime_buf",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.HyperStack.ST.ralloc_drgn",
"LowStar.Monotonic.Buffer.read_sub_buffer",
"FStar.Seq.Base.seq",
"Prims.unit",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatilibity_is_reflexive"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
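(* Informal reading of the compatibility condition above (a sketch added for
   clarity, mirroring the two conjuncts of compatible_subseq_preorder rather
   than stating anything new): sub_rel is compatible with rel on [i, j) when
   (1) every rel-step between two length-`len` sequences restricts to a
   sub_rel-step between their [i, j) slices, and (2) splicing a sub_rel-related
   replacement into the [i, j) slice of a sequence is itself a rel-step on the
   full sequence. *)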
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
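(* Illustrative consequence (sketch, not a statement from this file): for a
   buffer b, the sub-buffer `mgsub rel b 0ul (len b)` has the same frame,
   address, length and contents as b itself, so establishing its compatibility
   only needs reflexivity of the compatibility relation, which is exactly what
   the call to lemma_seq_sub_compatilibity_is_reflexive provides. *)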
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions
let loc_includes = MG.loc_includes
let loc_includes_refl = MG.loc_includes_refl
let loc_includes_trans = MG.loc_includes_trans
let loc_includes_union_r = MG.loc_includes_union_r
let loc_includes_union_l = MG.loc_includes_union_l
let loc_includes_none = MG.loc_includes_none
val loc_includes_buffer (#a:Type0) (#rrel1:srel a) (#rrel2:srel a) (#rel1:srel a) (#rel2:srel a)
(b1:mbuffer a rrel1 rel1) (b2:mbuffer a rrel2 rel2)
:Lemma (requires (frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
ubuffer_includes0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_includes (loc_buffer b1) (loc_buffer b2)))
let loc_includes_buffer #t #_ #_ #_ #_ b1 b2 =
let t1 = ubuffer (frameOf b1) (as_addr b1) in
MG.loc_includes_aloc #_ #cls #(frameOf b1) #(as_addr b1) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_includes_gsub_buffer_r l #_ #_ #_ b i len sub_rel =
let b' = mgsub sub_rel b i len in
loc_includes_buffer b b';
loc_includes_trans l (loc_buffer b) (loc_buffer b')
let loc_includes_gsub_buffer_l #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let b1 = mgsub sub_rel1 b i1 len1 in
let b2 = mgsub sub_rel2 b i2 len2 in
loc_includes_buffer b1 b2
let loc_includes_loc_buffer_loc_buffer_from_to #_ #_ #_ b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) (ubuffer_of_buffer_from_to b from to)
let loc_includes_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_includes_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
#push-options "--z3rlimit 20"
let loc_includes_as_seq #_ #rrel #_ #_ h1 h2 larger smaller =
if Null? smaller then () else
if Null? larger then begin
MG.loc_includes_none_elim (loc_buffer smaller);
MG.loc_of_aloc_not_none #_ #cls #(frameOf smaller) #(as_addr smaller) (ubuffer_of_buffer smaller)
end else begin
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller);
let ul = Ghost.reveal (ubuffer_of_buffer larger) in
let us = Ghost.reveal (ubuffer_of_buffer smaller) in
assert (as_seq h1 smaller == Seq.slice (as_seq h1 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller));
assert (as_seq h2 smaller == Seq.slice (as_seq h2 larger) (us.b_offset - ul.b_offset) (us.b_offset - ul.b_offset + length smaller))
end
#pop-options
let loc_includes_addresses_buffer #a #rrel #srel preserve_liveness r s p =
MG.loc_includes_addresses_aloc #_ #cls preserve_liveness r s #(as_addr p) (ubuffer_of_buffer p)
let loc_includes_region_buffer #_ #_ #_ preserve_liveness s b =
MG.loc_includes_region_aloc #_ #cls preserve_liveness s #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_includes_region_addresses = MG.loc_includes_region_addresses #_ #cls
let loc_includes_region_region = MG.loc_includes_region_region #_ #cls
let loc_includes_region_union_l = MG.loc_includes_region_union_l
let loc_includes_addresses_addresses = MG.loc_includes_addresses_addresses cls
let loc_disjoint = MG.loc_disjoint
let loc_disjoint_sym = MG.loc_disjoint_sym
let loc_disjoint_none_r = MG.loc_disjoint_none_r
let loc_disjoint_union_r = MG.loc_disjoint_union_r
let loc_disjoint_includes = MG.loc_disjoint_includes
val loc_disjoint_buffer (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
(b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2)
:Lemma (requires ((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2) ==>
ubuffer_disjoint0 #(frameOf b1) #(frameOf b2) #(as_addr b1) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)))
(ensures (loc_disjoint (loc_buffer b1) (loc_buffer b2)))
let loc_disjoint_buffer #_ #_ #_ #_ #_ #_ b1 b2 =
MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
let loc_disjoint_gsub_buffer #_ #_ #_ b i1 len1 sub_rel1 i2 len2 sub_rel2 =
loc_disjoint_buffer (mgsub sub_rel1 b i1 len1) (mgsub sub_rel2 b i2 len2)
let loc_disjoint_loc_buffer_from_to #_ #_ #_ b from1 to1 from2 to2 =
if ubuffer_of_buffer_from_to_none_cond b from1 to1 || ubuffer_of_buffer_from_to_none_cond b from2 to2
then ()
else MG.loc_disjoint_aloc_intro #_ #cls #(frameOf b) #(as_addr b) #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from1 to1) (ubuffer_of_buffer_from_to b from2 to2)
let loc_disjoint_addresses = MG.loc_disjoint_addresses_intro #_ #cls
let loc_disjoint_regions = MG.loc_disjoint_regions #_ #cls
let modifies = MG.modifies
let modifies_live_region = MG.modifies_live_region
let modifies_mreference_elim = MG.modifies_mreference_elim
let modifies_buffer_elim #_ #_ #_ b p h h' =
if g_is_null b
then
assert (as_seq h b `Seq.equal` as_seq h' b)
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) p h h' ;
ubuffer_preserved_elim b h h'
end
let modifies_buffer_from_to_elim #_ #_ #_ b from to p h h' =
if g_is_null b
then ()
else begin
MG.modifies_aloc_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) p h h' ;
ubuffer_preserved_from_to_elim b from to h h'
end
let modifies_refl = MG.modifies_refl
let modifies_loc_includes = MG.modifies_loc_includes
let address_liveness_insensitive_locs = MG.address_liveness_insensitive_locs _
let region_liveness_insensitive_locs = MG.region_liveness_insensitive_locs _
let address_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_address_liveness_insensitive_locs_aloc #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let address_liveness_insensitive_addresses =
MG.loc_includes_address_liveness_insensitive_locs_addresses cls
let region_liveness_insensitive_buffer #_ #_ #_ b =
MG.loc_includes_region_liveness_insensitive_locs_loc_of_aloc #_ cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let region_liveness_insensitive_addresses =
MG.loc_includes_region_liveness_insensitive_locs_loc_addresses cls
let region_liveness_insensitive_regions =
MG.loc_includes_region_liveness_insensitive_locs_loc_regions cls
let region_liveness_insensitive_address_liveness_insensitive =
MG.loc_includes_region_liveness_insensitive_locs_address_liveness_insensitive_locs cls
let modifies_liveness_insensitive_mreference = MG.modifies_preserves_liveness
let modifies_liveness_insensitive_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else
liveness_preservation_intro h h' x (fun t' pre r ->
MG.modifies_preserves_liveness_strong l1 l2 h h' r (ubuffer_of_buffer x))
let modifies_liveness_insensitive_region = MG.modifies_preserves_region_liveness
let modifies_liveness_insensitive_region_mreference = MG.modifies_preserves_region_liveness_reference
let modifies_liveness_insensitive_region_buffer l1 l2 h h' #_ #_ #_ x =
if g_is_null x then ()
else MG.modifies_preserves_region_liveness_aloc l1 l2 h h' #(frameOf x) #(as_addr x) (ubuffer_of_buffer x)
let modifies_trans = MG.modifies_trans
let modifies_only_live_regions = MG.modifies_only_live_regions
let no_upd_fresh_region = MG.no_upd_fresh_region
let new_region_modifies = MG.new_region_modifies #_ cls
let modifies_fresh_frame_popped = MG.modifies_fresh_frame_popped
let modifies_loc_regions_intro = MG.modifies_loc_regions_intro #_ #cls
let modifies_loc_addresses_intro = MG.modifies_loc_addresses_intro #_ #cls
let modifies_ralloc_post = MG.modifies_ralloc_post #_ #cls
let modifies_salloc_post = MG.modifies_salloc_post #_ #cls
let modifies_free = MG.modifies_free #_ #cls
let modifies_none_modifies = MG.modifies_none_modifies #_ #cls
let modifies_upd = MG.modifies_upd #_ #cls
val modifies_0_modifies
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (modifies loc_none h1 h2))
let modifies_0_modifies h1 h2 =
MG.modifies_none_intro #_ #cls h1 h2
(fun r -> modifies_0_live_region h1 h2 r)
(fun t pre b -> modifies_0_mreference #t #pre h1 h2 b)
(fun r n -> modifies_0_unused_in h1 h2 r n)
val modifies_1_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_1 b h1 h2))
(ensures (modifies (loc_buffer b) h1 h2))
let modifies_1_modifies #t #_ #_ b h1 h2 =
if g_is_null b
then begin
modifies_1_null b h1 h2;
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer b) h1 h2
(fun r -> modifies_1_live_region b h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer b);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_mreference b h1 h2 p
)
(fun t pre p ->
modifies_1_liveness b h1 h2 p
)
(fun r n ->
modifies_1_unused_in b h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer b);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer b) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_ubuffer #t b h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_mreference b h1 h2 r_)
)
val modifies_1_from_to_modifies
(#a:Type0)(#rrel #rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
:Lemma (requires (modifies_1_from_to b from to h1 h2))
(ensures (modifies (loc_buffer_from_to b from to) h1 h2))
let modifies_1_from_to_modifies #t #_ #_ b from to h1 h2 =
if ubuffer_of_buffer_from_to_none_cond b from to
then begin
modifies_0_modifies h1 h2
end else
MG.modifies_intro (loc_buffer_from_to b from to) h1 h2
(fun r -> modifies_1_from_to_live_region b from to h1 h2 r)
(fun t pre p ->
loc_disjoint_sym (loc_mreference p) (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_addresses_elim #_ #cls #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) true (HS.frameOf p) (Set.singleton (HS.as_addr p));
modifies_1_from_to_mreference b from to h1 h2 p
)
(fun t pre p ->
modifies_1_from_to_liveness b from to h1 h2 p
)
(fun r n ->
modifies_1_from_to_unused_in b from to h1 h2 r n
)
(fun r' a' b' ->
loc_disjoint_sym (MG.loc_of_aloc b') (loc_buffer_from_to b from to);
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b) #(as_addr b) #r' #a' (ubuffer_of_buffer_from_to b from to) b';
if frameOf b = r' && as_addr b = a'
then
modifies_1_from_to_ubuffer #t b from to h1 h2 b'
else
same_mreference_ubuffer_preserved #r' #a' b' h1 h2
(fun a_ pre_ r_ -> modifies_1_from_to_mreference b from to h1 h2 r_)
)
val modifies_addr_of_modifies
(#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
:Lemma (requires (modifies_addr_of b h1 h2))
(ensures (modifies (loc_addr_of_buffer b) h1 h2))
let modifies_addr_of_modifies #t #_ #_ b h1 h2 =
MG.modifies_address_intro #_ #cls (frameOf b) (as_addr b) h1 h2
(fun r -> modifies_addr_of_live_region b h1 h2 r)
(fun t pre p ->
modifies_addr_of_mreference b h1 h2 p
)
(fun r n ->
modifies_addr_of_unused_in b h1 h2 r n
)
val modifies_loc_buffer_from_to_intro'
(#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
(from to: U32.t)
(l: loc) (h h' : HS.mem)
: Lemma
(requires (
let s = as_seq h b in
let s' = as_seq h' b in
not (g_is_null b) /\
live h b /\
modifies (loc_union l (loc_buffer b)) h h' /\
U32.v from <= U32.v to /\
U32.v to <= length b /\
Seq.slice s 0 (U32.v from) `Seq.equal` Seq.slice s' 0 (U32.v from) /\
Seq.slice s (U32.v to) (length b) `Seq.equal` Seq.slice s' (U32.v to) (length b)
))
(ensures (modifies (loc_union l (loc_buffer_from_to b from to)) h h'))
#push-options "--z3rlimit 16"
let modifies_loc_buffer_from_to_intro' #a #rrel #rel b from to l h h' =
let r0 = frameOf b in
let a0 = as_addr b in
let bb : ubuffer r0 a0 = ubuffer_of_buffer b in
modifies_loc_includes (loc_union l (loc_addresses true r0 (Set.singleton a0))) h h' (loc_union l (loc_buffer b));
MG.modifies_strengthen l #r0 #a0 (ubuffer_of_buffer_from_to b from to) h h' (fun f (x: ubuffer r0 a0) ->
ubuffer_preserved_intro x h h'
(fun t' rrel' rel' b' -> f _ _ (Buffer?.content b'))
(fun t' rrel' rel' b' ->
// prove that the types, rrels, rels are equal
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
assert (Seq.seq t' == Seq.seq a);
let _s0 : Seq.seq a = as_seq h b in
let _s1 : Seq.seq t' = coerce_eq _ _s0 in
lemma_equal_instances_implies_equal_types a t' _s0 _s1;
let boff = U32.v (Buffer?.idx b) in
let from_ = boff + U32.v from in
let to_ = boff + U32.v to in
let ({ b_max_length = ml; b_offset = xoff; b_length = xlen; b_is_mm = is_mm }) = Ghost.reveal x in
let ({ b_max_length = _; b_offset = b'off; b_length = b'len }) = Ghost.reveal (ubuffer_of_buffer b') in
let bh = as_seq h b in
let bh' = as_seq h' b in
let xh = Seq.slice (as_seq h b') (xoff - b'off) (xoff - b'off + xlen) in
let xh' = Seq.slice (as_seq h' b') (xoff - b'off) (xoff - b'off + xlen) in
let prf (i: nat) : Lemma
(requires (i < xlen))
(ensures (i < xlen /\ Seq.index xh i == Seq.index xh' i))
= let xi = xoff + i in
let bi : ubuffer r0 a0 =
Ghost.hide ({ b_max_length = ml; b_offset = xi; b_length = 1; b_is_mm = is_mm; })
in
assert (Seq.index xh i == Seq.index (Seq.slice (as_seq h b') (xi - b'off) (xi - b'off + 1)) 0);
assert (Seq.index xh' i == Seq.index (Seq.slice (as_seq h' b') (xi - b'off) (xi - b'off + 1)) 0);
let li = MG.loc_of_aloc bi in
MG.loc_includes_aloc #_ #cls x bi;
loc_disjoint_includes l (MG.loc_of_aloc x) l li;
if xi < boff || boff + length b <= xi
then begin
MG.loc_disjoint_aloc_intro #_ #cls bb bi;
assert (loc_disjoint (loc_union l (loc_buffer b)) li);
MG.modifies_aloc_elim bi (loc_union l (loc_buffer b)) h h'
end else
if xi < from_
then begin
assert (Seq.index xh i == Seq.index (Seq.slice bh 0 (U32.v from)) (xi - boff));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' 0 (U32.v from)) (xi - boff))
end else begin
assert (to_ <= xi);
assert (Seq.index xh i == Seq.index (Seq.slice bh (U32.v to) (length b)) (xi - to_));
assert (Seq.index xh' i == Seq.index (Seq.slice bh' (U32.v to) (length b)) (xi - to_))
end
in
Classical.forall_intro (Classical.move_requires prf);
assert (xh `Seq.equal` xh')
)
)
#pop-options
let modifies_loc_buffer_from_to_intro #a #rrel #rel b from to l h h' =
if g_is_null b
then ()
else modifies_loc_buffer_from_to_intro' b from to l h h'
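(* Note added for exposition (a sketch, not part of the original development):
   modifies_loc_buffer_from_to_intro lets a client shrink a footprint of the
   form loc_union l (loc_buffer b) down to
   loc_union l (loc_buffer_from_to b from to), provided the prefix [0, from)
   and the suffix [to, length b) of b are unchanged between the two states.
   A hypothetical proof step, assuming h0, h1, from, to are in scope and the
   two slice equalities of the precondition have already been established:
     modifies_loc_buffer_from_to_intro b from to l h0 h1;
     assert (modifies (loc_union l (loc_buffer_from_to b from to)) h0 h1)
   Exact preconditions are those stated in the interface. *)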
let does_not_contain_addr = MG.does_not_contain_addr
let not_live_region_does_not_contain_addr = MG.not_live_region_does_not_contain_addr
let unused_in_does_not_contain_addr = MG.unused_in_does_not_contain_addr
let addr_unused_in_does_not_contain_addr = MG.addr_unused_in_does_not_contain_addr
let free_does_not_contain_addr = MG.free_does_not_contain_addr
let does_not_contain_addr_elim = MG.does_not_contain_addr_elim
let modifies_only_live_addresses = MG.modifies_only_live_addresses
let loc_not_unused_in = MG.loc_not_unused_in _
let loc_unused_in = MG.loc_unused_in _
let loc_regions_unused_in = MG.loc_regions_unused_in cls
let loc_unused_in_not_unused_in_disjoint =
MG.loc_unused_in_not_unused_in_disjoint cls
let not_live_region_loc_not_unused_in_disjoint = MG.not_live_region_loc_not_unused_in_disjoint cls
let live_loc_not_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.does_not_contain_addr_addr_unused_in h) (frameOf b, as_addr b);
MG.loc_addresses_not_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let unused_in_loc_unused_in #_ #_ #_ b h =
unused_in_equiv b h;
Classical.move_requires (MG.addr_unused_in_does_not_contain_addr h) (frameOf b, as_addr b);
MG.loc_addresses_unused_in cls (frameOf b) (Set.singleton (as_addr b)) h;
()
let modifies_address_liveness_insensitive_unused_in =
MG.modifies_address_liveness_insensitive_unused_in cls
let modifies_only_not_unused_in = MG.modifies_only_not_unused_in
let mreference_live_loc_not_unused_in =
MG.mreference_live_loc_not_unused_in cls
let mreference_unused_in_loc_unused_in =
MG.mreference_unused_in_loc_unused_in cls
let modifies_loc_unused_in l h1 h2 l' =
modifies_loc_includes address_liveness_insensitive_locs h1 h2 l;
modifies_address_liveness_insensitive_unused_in h1 h2;
loc_includes_trans (loc_unused_in h1) (loc_unused_in h2) l'
let fresh_frame_modifies h0 h1 = MG.fresh_frame_modifies #_ cls h0 h1
let popped_modifies = MG.popped_modifies #_ cls
let modifies_remove_new_locs l_fresh l_aux l_goal h1 h2 h3 =
modifies_only_not_unused_in l_goal h1 h3
let disjoint_neq #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
let empty_disjoint
#t1 #t2 #rrel1 #rel1 #rrel2 #rel2 b1 b2
= let r = frameOf b1 in
let a = as_addr b1 in
if r = frameOf b2 && a = as_addr b2 then
MG.loc_disjoint_aloc_intro #_ #cls #r #a #r #a (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
else ()
(*
let includes_live #a #rrel #rel1 #rel2 h larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let includes_frameOf_as_addr #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
let pointer_distinct_sel_disjoint #a #_ #_ #_ #_ b1 b2 h =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2
then begin
HS.mreference_distinct_sel_disjoint h (Buffer?.content b1) (Buffer?.content b2);
loc_disjoint_buffer b1 b2
end
else
loc_disjoint_buffer b1 b2
let is_null #_ #_ #_ b = Null? b
let msub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content i0 len0 ->
Buffer max_len content (U32.add i0 i) len
let moffset #a #rrel #rel sub_rel b i =
match b with
| Null -> Null
| Buffer max_len content i0 len ->
Buffer max_len content (U32.add i0 i) (Ghost.hide ((U32.sub (Ghost.reveal len) i)))
let index #_ #_ #_ b i =
let open HST in
let s = ! (Buffer?.content b) in
Seq.index s (U32.v (Buffer?.idx b) + U32.v i)
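(* Note added for exposition (not part of the original file): msub and moffset
   above only shift the start index into the same underlying reference, and
   index adds that start index back before reading.  In ghost terms, by
   as_seq_gsub, for a buffer b with length b >= 3 and any sub-preorder sub_rel,
     Seq.index (as_seq h (mgsub sub_rel b 2ul 1ul)) 0 == Seq.index (as_seq h b) 2
   so reading position 0 of the sub-buffer that starts at index 2 reads
   position 2 of b.  This is only a sketch of the intended reading and has not
   been re-verified here. *)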
let g_upd_seq #_ #_ #_ b s h =
if Seq.length s = 0 then h
else
let s0 = HS.sel h (Buffer?.content b) in
let Buffer _ content idx length = b in
HS.upd h (Buffer?.content b) (Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v length) s)
let lemma_g_upd_with_same_seq #_ #_ #_ b h =
if Null? b then ()
else
let open FStar.UInt32 in
let Buffer _ content idx length = b in
let s = HS.sel h content in
assert (Seq.equal (Seq.replace_subseq s (v idx) (v idx + v length) (Seq.slice s (v idx) (v idx + v length))) s);
HS.lemma_heap_equality_upd_with_sel h (Buffer?.content b)
#push-options "--z3rlimit 48"
let g_upd_seq_as_seq #a #_ #_ b s h =
let h' = g_upd_seq b s h in
if g_is_null b then assert (Seq.equal s Seq.empty)
else begin
assert (Seq.equal (as_seq h' b) s);
// prove modifies_1_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_modifies b h h'
end
let g_upd_modifies_strong #_ #_ #_ b i v h =
let h' = g_upd b i v h in
// prove modifies_1_from_to_preserves_ubuffers
Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
s_lemma_equal_instances_implies_equal_types ();
modifies_1_from_to_modifies b (U32.uint_to_t i) (U32.uint_to_t (i + 1)) h h'
#pop-options
let upd' #_ #_ #_ b i v =
let open HST in
let h = get() in
let Buffer max_length content idx len = b in
let s0 = !content in
let sb0 = Seq.slice s0 (U32.v idx) (U32.v max_length) in
let s_upd = Seq.upd sb0 (U32.v i) v in
let sf = Seq.replace_subseq s0 (U32.v idx) (U32.v max_length) s_upd in
assert (sf `Seq.equal`
Seq.replace_subseq s0 (U32.v idx) (U32.v idx + U32.v len) (Seq.upd (as_seq h b) (U32.v i) v));
content := sf
let recallable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot Type0 =
(not (g_is_null b)) ==> (
HST.is_eternal_region (frameOf b) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
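(* Illustrative sketch (not part of the original file): recallable holds in
   particular for buffers allocated in an eternal heap region without manual
   memory management, e.g. via mgcmalloc, or via the trivial-preorder wrapper
   LowStar.Buffer.gcmalloc.  A hypothetical client could then re-establish
   liveness much later with recall:
     let b = mgcmalloc HS.root 0uy 8ul in
     ... code that may lose the liveness hypothesis ...
     recall b;
     // by the interface, live h b holds again at this point
   The implicit preorder arguments and the exact precondition of recall are as
   in the interface; this comment is only a sketch. *)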
let region_lifetime_buf #_ #_ #_ b =
(not (g_is_null b)) ==> (
HS.is_heap_color (HS.color (frameOf b)) /\
not (HS.is_mm (Buffer?.content b)) /\
buffer_compatible b
)
let region_lifetime_sub #a #rrel #rel #subrel b0 b1 =
match b1 with
| Null -> ()
| Buffer max_len content idx length ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j subrel)
let recallable_null #_ #_ #_ = ()
let recallable_mgsub #_ #rrel #rel b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
(*
let recallable_includes #_ #_ #_ #_ #_ #_ larger smaller =
if Null? larger || Null? smaller then ()
else
MG.loc_includes_aloc_elim #_ #cls #(frameOf larger) #(frameOf smaller) #(as_addr larger) #(as_addr smaller) (ubuffer_of_buffer larger) (ubuffer_of_buffer smaller)
*)
let recall #_ #_ #_ b = if Null? b then () else HST.recall (Buffer?.content b)
private let spred_as_mempred (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (p:spred a)
:HST.mem_predicate
= fun h ->
buffer_compatible b ==>
p (as_seq h b)
let witnessed #_ #rrel #rel b p =
match b with
| Null -> p Seq.empty
| Buffer max_length content idx length ->
HST.token_p content (spred_as_mempred b p)
private let lemma_stable_on_rel_is_stable_on_rrel (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (p:spred a)
:Lemma (requires (Buffer? b /\ stable_on p rel))
(ensures (HST.stable_on (spred_as_mempred b p) (Buffer?.content b)))
= let Buffer max_length content idx length = b in
let mp = spred_as_mempred b p in
let aux (h0 h1:HS.mem) :Lemma ((mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content)) ==> mp h1)
= Classical.arrow_to_impl #(mp h0 /\ rrel (HS.sel h0 content) (HS.sel h1 content) /\ buffer_compatible b) #(mp h1)
(fun _ -> assert (rel (as_seq h0 b) (as_seq h1 b)))
in
Classical.forall_intro_2 aux
let witness_p #a #rrel #rel b p =
match b with
| Null -> ()
| Buffer _ content _ _ ->
lemma_stable_on_rel_is_stable_on_rrel b p;
//AR: TODO: the proof doesn't go through without this assertion, which should follow directly from the lemma call
assert (HST.stable_on #(Seq.lseq a (U32.v (Buffer?.max_length b))) #(srel_to_lsrel (U32.v (Buffer?.max_length b)) rrel) (spred_as_mempred b p) (Buffer?.content b));
HST.witness_p content (spred_as_mempred b p)
let recall_p #_ #_ #_ b p =
match b with
| Null -> ()
| Buffer _ content _ _ -> HST.recall_p content (spred_as_mempred b p)
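(* Illustrative sketch (not part of the original file): the witness/recall
   machinery above is meant for predicates that are stable under the buffer's
   preorder rel.  Assuming p : spred a with stable_on p rel, and a state in
   which p (as_seq h b) holds, a hypothetical client would write:
     witness_p b p;
     ... arbitrary code that respects rel on b ...
     recall_p b p;
     // p (as_seq h b) holds again in the current state
   The precise pre- and postconditions are those of witness_p and recall_p in
   the interface; this is only a sketch. *)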
let witnessed_functorial #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> assert (as_seq HS.empty_mem b1 == Seq.empty)
| Buffer _ content _ _, _ ->
assert (forall (len:nat) (i:nat) (j:nat{i <= j /\ j <= len}). compatible_sub_preorder len rrel i j rel1);
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let witnessed_functorial_st #a #rrel #rel1 #rel2 b1 b2 i len s1 s2 =
match b1, b2 with
| Null, Null -> ()
| Buffer _ content _ _, _ ->
HST.token_functoriality content (spred_as_mempred b1 s1) (spred_as_mempred b2 s2)
let freeable (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) =
(not (g_is_null b)) /\
HS.is_mm (Buffer?.content b) /\
HS.is_heap_color (HS.color (frameOf b)) /\
U32.v (Buffer?.max_length b) > 0 /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b
let free #_ #_ #_ b = HST.rfree (Buffer?.content b)
let freeable_length #_ #_ #_ b = ()
let freeable_disjoint #_ #_ #_ #_ #_ #_ b1 b2 =
if frameOf b1 = frameOf b2 && as_addr b1 = as_addr b2 then
MG.loc_disjoint_aloc_elim #_ #cls #(frameOf b1) #(as_addr b1) #(frameOf b2) #(as_addr b2) (ubuffer_of_buffer b1) (ubuffer_of_buffer b2)
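(* Illustrative sketch (not part of the original file): freeable only holds
   for whole-allocation buffers (idx = 0 and length = max_length) coming from
   a manually-managed allocator such as mmalloc; it never holds for
   sub-buffers.  A hypothetical allocate/use/release sequence, assuming the
   implicit preorder arguments are resolved, e.g. via the trivial-preorder
   wrappers in LowStar.Buffer:
     let b = mmalloc HS.root 0uy 8ul in  // the interface gives freeable b
     upd b 0ul 1uy;
     free b
   Exact specifications are those of mmalloc, upd and free in the interface;
   this is only a sketch. *)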
private let alloc_heap_common (#a:Type0) (#rrel:srel a)
(r:HST.erid) (len:U32.t{U32.v len > 0}) (s:Seq.seq a{Seq.length s == U32.v len})
(mm:bool)
:HST.ST (lmbuffer a rrel rrel (U32.v len))
(requires (fun _ -> True))
(ensures (fun h0 b h1 -> alloc_post_mem_common b h0 h1 s /\
frameOf b == r /\
HS.is_mm (Buffer?.content b) == mm /\
Buffer?.idx b == 0ul /\
Ghost.reveal (Buffer?.length b) == Buffer?.max_length b))
= lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
if mm then HST.ralloc_mm r s else HST.ralloc r s
in
let b = Buffer len content 0ul (Ghost.hide len) in
b
let mgcmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) false
private let read_sub_buffer (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel) (idx len:U32.t)
: HST.ST (Seq.seq a)
(requires fun h0 ->
live h0 b /\ U32.v len > 0 /\
U32.v idx + U32.v len <= length b)
(ensures fun h0 s h1 ->
h0 == h1 /\
s == Seq.slice (as_seq h0 b) (U32.v idx) (U32.v idx + U32.v len))
= let open HST in
let s = ! (Buffer?.content b) in //the whole allocation unit
let s = Seq.slice s (U32.v (Buffer?.idx b))
(U32.v (Buffer?.max_length b)) in //b buffer
Seq.slice s (U32.v idx) (U32.v idx + U32.v len) //slice of b
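(* Worked example (added for exposition, not part of the original file):
   suppose the allocation unit has max_length 16 and the buffer b starts at
   Buffer?.idx b = 4 with length 8, and read_sub_buffer is called with idx = 2
   and len = 3.  The first slice keeps positions [4, 16) of the allocation
   unit, and the second keeps positions [2, 5) of that, i.e. positions [6, 9)
   of the allocation unit, which is exactly Seq.slice (as_seq h0 b) 2 5 as the
   postcondition states. *)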
let mgcmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) false
let mmalloc #_ #_ r init len =
alloc_heap_common r len (Seq.create (U32.v len) init) true
let mmalloc_and_blit #_ #_ r #_ #_ src id_src len =
alloc_heap_common r len (read_sub_buffer src id_src len) true
let malloca #a #rrel init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (Seq.create (U32.v len) init)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_and_blit #a #rrel #_ #_ src id_src len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc (read_sub_buffer src id_src len)
in
Buffer len content 0ul (Ghost.hide len)
let malloca_of_list #a #rrel init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.salloc s
in
Buffer len content 0ul (Ghost.hide len)
let mgcmalloc_of_list #a #rrel r init =
let len = U32.uint_to_t (FStar.List.Tot.length init) in
let s = Seq.seq_of_list init in
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content: HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc r s
in
Buffer len content 0ul (Ghost.hide len)
let mmalloc_drgn #a #rrel d init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len
let mmalloc_drgn_mm #a #rrel d init len =
lemma_seq_sub_compatilibity_is_reflexive (U32.v len) rrel;
let content : HST.mreference (Seq.lseq a (U32.v len)) (srel_to_lsrel (U32.v len) rrel) =
HST.ralloc_drgn_mm d (Seq.create (U32.v len) init)
in
Buffer len content 0ul len | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mmalloc_drgn_and_blit (#a:Type0) (#rrel:srel a)
(#rrel1 #rel1:srel a)
(d:HST.drgn) (src:mbuffer a rrel1 rel1) (id_src:U32.t) (len:U32.t)
: HST.ST (b:lmbuffer a rrel rrel (U32.v len){frameOf b == HST.rid_of_drgn d /\ region_lifetime_buf b})
(requires fun h ->
alloc_drgn_pre h d len /\
live h src /\
U32.v id_src + U32.v len <= length src)
(ensures fun h0 b h1 ->
alloc_post_mem_common b h0 h1
(Seq.slice (as_seq h0 src) (U32.v id_src) (U32.v id_src + U32.v len))) | [] | LowStar.Monotonic.Buffer.mmalloc_drgn_and_blit | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
d: FStar.HyperStack.ST.drgn ->
src: LowStar.Monotonic.Buffer.mbuffer a rrel1 rel1 ->
id_src: FStar.UInt32.t ->
len: FStar.UInt32.t
-> FStar.HyperStack.ST.ST
(b:
LowStar.Monotonic.Buffer.lmbuffer a rrel rrel (FStar.UInt32.v len)
{ LowStar.Monotonic.Buffer.frameOf b == FStar.HyperStack.ST.rid_of_drgn d /\
LowStar.Monotonic.Buffer.region_lifetime_buf b }) | {
"end_col": 28,
"end_line": 1561,
"start_col": 2,
"start_line": 1557
} |
FStar.Pervasives.Lemma | val s_lemma_equal_instances_implies_equal_types: unit
-> Lemma
(forall (a: Type) (b: Type) (s1: Seq.seq a) (s2: Seq.seq b).
{:pattern (has_type s1 (Seq.seq a)); (has_type s2 (Seq.seq b))}
s1 === s2 ==> a == b) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types() | val s_lemma_equal_instances_implies_equal_types: unit
-> Lemma
(forall (a: Type) (b: Type) (s1: Seq.seq a) (s2: Seq.seq b).
{:pattern (has_type s1 (Seq.seq a)); (has_type s2 (Seq.seq b))}
s1 === s2 ==> a == b)
let s_lemma_equal_instances_implies_equal_types (_: unit)
: Lemma
(forall (a: Type) (b: Type) (s1: Seq.seq a) (s2: Seq.seq b).
{:pattern (has_type s1 (Seq.seq a)); (has_type s2 (Seq.seq b))}
s1 === s2 ==> a == b) = | false | null | true | Seq.lemma_equal_instances_implies_equal_types () | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"Prims.unit",
"FStar.Seq.Base.lemma_equal_instances_implies_equal_types",
"Prims.l_True",
"Prims.squash",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.op_Equals_Equals_Equals",
"Prims.eq2",
"Prims.has_type",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
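(* Worked example (added for exposition, not part of the original file): the
   offsets in the transitivity lemma compose by simple addition.  Take
   len = 16, an outer window [i1, j1) = [2, 10), and inside that window a
   relative window [i2, j2) = [3, 5).  The conclusion then talks about the
   absolute window [i1 + i2, i1 + j2) = [5, 7) of the original length-16
   sequence. *)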
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
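(* Note added for exposition (not part of the original file): with the same
   preorder, mgsub rel b 0ul (len b) covers the same range as b itself, so for
   any state h in which b is live, as_seq h (mgsub rel b 0ul (len b)) is the
   same sequence as as_seq h b.  This is the sense in which b is its own
   largest sub-buffer; the reflexivity lemma used above discharges the
   compatibility side condition. *)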
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) } | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val s_lemma_equal_instances_implies_equal_types: unit
-> Lemma
(forall (a: Type) (b: Type) (s1: Seq.seq a) (s2: Seq.seq b).
{:pattern (has_type s1 (Seq.seq a)); (has_type s2 (Seq.seq b))}
s1 === s2 ==> a == b) | [] | LowStar.Monotonic.Buffer.s_lemma_equal_instances_implies_equal_types | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures
forall (a: Type) (b: Type) (s1: FStar.Seq.Base.seq a) (s2: FStar.Seq.Base.seq b).
{:pattern
Prims.has_type s1 (FStar.Seq.Base.seq a); Prims.has_type s2 (FStar.Seq.Base.seq b)}
s1 === s2 ==> a == b) | {
"end_col": 51,
"end_line": 214,
"start_col": 4,
"start_line": 214
} |
Prims.Tot | val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } ) | val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = | false | null | false | (x: ubuffer_{x.b_offset + x.b_length <= x.b_max_length}) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer_",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_offset",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_length",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_max_length"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
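(* Worked example (added for exposition, not part of the original file): for a
   non-null buffer built as Buffer 16ul content 4ul (Ghost.hide 8ul) whose
   underlying reference is not manually managed, the untyped view constructed
   later in this file (ubuffer_of_buffer') is the record
     { b_max_length = 16; b_offset = 4; b_length = 8; b_is_mm = false }
   i.e. it erases the element type and the preorders and keeps only the
   allocation-unit geometry. *)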
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0 | false | true | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | region: FStar.Monotonic.HyperHeap.rid -> addr: Prims.nat -> Type0 | {
"end_col": 87,
"end_line": 259,
"start_col": 27,
"start_line": 259
} |
Prims.GTot | val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_preserved = ubuffer_preserved' | val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = | false | null | false | ubuffer_preserved' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.ubuffer_preserved'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
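(* Worked example (added for exposition, not part of the original file):
   instantiate ubuffer_preserved' with a view whose b_offset is 4 and whose
   b_length is 8, against a live buffer b' with Buffer?.idx b' = 2 and an
   extent large enough that 4 + 8 <= 2 + length b'.  The required slice
   equality is then over positions [4 - 2, 4 - 2 + 8) = [2, 10) of
   as_seq h b' and as_seq h' b': the elements covered by the untyped view must
   read the same in both states. *)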
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_preserved | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.ubuffer r a ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 42,
"end_line": 302,
"start_col": 24,
"start_line": 302
} |
Prims.Tot | val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b | val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = | false | null | false | ubuffer_of_buffer' b | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer'",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b)) | [] | LowStar.Monotonic.Buffer.ubuffer_of_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> LowStar.Monotonic.Buffer.ubuffer (LowStar.Monotonic.Buffer.frameOf b)
(LowStar.Monotonic.Buffer.as_addr b) | {
"end_col": 55,
"end_line": 419,
"start_col": 35,
"start_line": 419
} |
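A small typing sketch for ubuffer_of_buffer (the binding name untyped_view is hypothetical): it restates the indexing in the val above, namely that the untyped view of a buffer is registered at the buffer's own region and address.

let untyped_view (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : ubuffer (frameOf b) (as_addr b)   // same index as in the val signature
  = ubuffer_of_buffer b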
FStar.Pervasives.Lemma | val lemma_seq_sub_compatibility_is_transitive
(#a: Type0)
(len: nat)
(rel: srel a)
(i1 j1: nat)
(rel1: srel a)
(i2 j2: nat)
(rel2: srel a)
: Lemma
(requires
(i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1 | val lemma_seq_sub_compatibility_is_transitive
(#a: Type0)
(len: nat)
(rel: srel a)
(i1 j1: nat)
(rel1: srel a)
(i2 j2: nat)
(rel2: srel a)
: Lemma
(requires
(i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
let lemma_seq_sub_compatibility_is_transitive
(#a: Type0)
(len: nat)
(rel: srel a)
(i1 j1: nat)
(rel1: srel a)
(i2 j2: nat)
(rel2: srel a)
: Lemma
(requires
(i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2)) = | false | null | true | let t1 (s1 s2: Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2: Seq.seq a) =
t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2))
in
let aux0 (s1 s2: Seq.seq a) : Lemma (t1 s1 s2 ==> t2 s1 s2) =
Classical.arrow_to_impl #(t1 s1 s2)
#(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2)))
)
in
let t1 (s s2: Seq.seq a) =
Seq.length s == len /\ Seq.length s2 == j2 - i2 /\ rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2
in
let t2 (s s2: Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2: Seq.seq a) : Lemma (t1 s s2 ==> t2 s s2) =
Classical.arrow_to_impl #(t1 s s2)
#(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
);
assert (Seq.equal (Seq.replace_subseq s
i1
j1
(Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0;
Classical.forall_intro_2 aux1 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"Prims.nat",
"LowStar.Monotonic.Buffer.srel",
"FStar.Classical.forall_intro_2",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"FStar.Classical.arrow_to_impl",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Properties.replace_subseq",
"FStar.Seq.Base.slice",
"Prims.op_Addition",
"Prims.logical",
"Prims.l_and",
"Prims.eq2",
"FStar.Seq.Base.length",
"Prims.int",
"Prims.op_Subtraction",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"LowStar.Monotonic.Buffer.compatible_sub_preorder"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_seq_sub_compatibility_is_transitive
(#a: Type0)
(len: nat)
(rel: srel a)
(i1 j1: nat)
(rel1: srel a)
(i2 j2: nat)
(rel2: srel a)
: Lemma
(requires
(i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2)) | [] | LowStar.Monotonic.Buffer.lemma_seq_sub_compatibility_is_transitive | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
len: Prims.nat ->
rel: LowStar.Monotonic.Buffer.srel a ->
i1: Prims.nat ->
j1: Prims.nat ->
rel1: LowStar.Monotonic.Buffer.srel a ->
i2: Prims.nat ->
j2: Prims.nat ->
rel2: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires
i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
LowStar.Monotonic.Buffer.compatible_sub_preorder len rel i1 j1 rel1 /\
LowStar.Monotonic.Buffer.compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2)
(ensures LowStar.Monotonic.Buffer.compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2) | {
"end_col": 64,
"end_line": 83,
"start_col": 3,
"start_line": 57
} |
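A worked instance of lemma_seq_sub_compatibility_is_transitive with illustrative numbers (not taken from the source); the inner window is given relative to the outer one, and the conclusion is stated on the absolute window [i1 + i2, i1 + j2).

(* With len = 10, an outer window [i1, j1) = [2, 8) compatible with rel1, and an
   inner window [i2, j2) = [1, 4) relative to the outer one and compatible with rel2,
   the side conditions 2 <= 8 <= 10, 1 <= 4 and 4 <= 8 - 2 hold, and the lemma yields
   compatibility of rel2 on [i1 + i2, i1 + j2) = [3, 6) of the original sequence.
   This is the call shape used by gsub_gsub in the file context above:
     lemma_seq_sub_compatibility_is_transitive (length b) rel
       (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
       (U32.v i2) (U32.v i2 + U32.v len2) sub_rel2 *)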
Prims.GTot | val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b | val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0
let live #_ #rrel #rel h b = | false | null | false | match b with
| Null -> True
| Buffer max_length content idx length -> h `HS.contains` content /\ buffer_compatible b | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_True",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"Prims.l_and",
"FStar.Monotonic.HyperStack.contains",
"LowStar.Monotonic.Buffer.buffer_compatible"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot Type0 | [] | LowStar.Monotonic.Buffer.live | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> Prims.GTot Type0 | {
"end_col": 25,
"end_line": 117,
"start_col": 2,
"start_line": 113
} |
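A minimal usage sketch showing where live sits in a Low* specification; the function name read_first is hypothetical and the body assumes the standard index operation from this module's interface.

let read_first (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : HST.Stack a
      (requires (fun h -> live h b /\ 0 < length b))           // liveness is the usual read precondition
      (ensures  (fun h0 x h1 -> h0 == h1 /\ x == Seq.index (as_seq h0 b) 0))
  = index b 0ul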
Prims.Tot | val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b) | val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid
let frameOf #_ #_ #_ b = | false | null | false | if Null? b then HS.root else HS.frameOf (Buffer?.content b) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.uu___is_Null",
"FStar.Monotonic.HyperHeap.root",
"Prims.bool",
"FStar.Monotonic.HyperStack.frameOf",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content",
"FStar.Monotonic.HyperHeap.rid"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val frameOf (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :Tot HS.rid | [] | LowStar.Monotonic.Buffer.frameOf | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> FStar.Monotonic.HyperHeap.rid | {
"end_col": 84,
"end_line": 125,
"start_col": 25,
"start_line": 125
} |
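A small sketch pairing frameOf with as_addr (the name footprint_coords is hypothetical): these two values are exactly the indices of the untyped ubuffer view used throughout this file, so they act as the identity of a buffer's footprint.

let footprint_coords (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : GTot (HS.rid & nat)
  = (frameOf b, as_addr b)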
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller) | let ubuffer_includes0
(#r1 #r2: HS.rid)
(#a1 #a2: nat)
(larger: ubuffer r1 a1)
(smaller: ubuffer r2 a2)
= | false | null | false | r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.ubuffer_includes'",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.ubuffer'",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
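(* Added illustrative example (not part of the original source), with
   hypothetical field values: the records
     larger  = { b_max_length = 16; b_offset = 2; b_length = 8; b_is_mm = false }
     smaller = { b_max_length = 16; b_offset = 4; b_length = 4; b_is_mm = false }
   satisfy ubuffer_includes', since the flags and maximal lengths agree,
   2 <= 4, and 4 + 4 <= 2 + 8. *)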
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_includes0 : larger: LowStar.Monotonic.Buffer.ubuffer r1 a1 -> smaller: LowStar.Monotonic.Buffer.ubuffer r2 a2
-> Prims.logical | [] | LowStar.Monotonic.Buffer.ubuffer_includes0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | larger: LowStar.Monotonic.Buffer.ubuffer r1 a1 -> smaller: LowStar.Monotonic.Buffer.ubuffer r2 a2
-> Prims.logical | {
"end_col": 80,
"end_line": 472,
"start_col": 2,
"start_line": 472
} |
|
Prims.GTot | val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller | val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = | false | null | false | ubuffer_includes0 larger smaller | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.ubuffer_includes0"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
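(* Added worked example of the relative-offset arithmetic (not part of the
   original source): with i1 = 2 and j1 = 10 the first window is [2, 10);
   choosing i2 = 1 and j2 = 5 relative to it (so j2 <= j1 - i1 = 8) selects
   the absolute window [i1 + i2, i1 + j2) = [3, 7), which is the range the
   conclusion compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2 is
   about. *)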
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
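(* Added worked example (not part of the original source), with hypothetical
   indices: for a sub-buffer of b taken at offset i1 = 3ul with length
   len1 = 8ul, a further sub-buffer at offset i2 = 2ul with length len2 = 4ul
   (2 + 4 <= 8) corresponds to the view of b starting at offset
   U32.add i1 i2 = 5ul with length 4ul, which is the index handled in the
   compatible_sub obligation above. *)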
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
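(* Added illustrative example (not part of the original source), with
   hypothetical values: for a non-null Buffer with max_length = 16ul,
   idx = 2ul and length = 8ul whose content is not manually managed, the
   untyped view records b_max_length = 16, b_offset = 2, b_length = 8 and
   b_is_mm = false. *)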
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
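(* Added worked example of the index shift above (not part of the original
   source): if the untyped view has b_offset = 6 and b_length = 4, and b' is a
   live Buffer with idx = 2ul whose range covers it, the preserved slice of
   as_seq h b' is [boff - U32.v idx, boff - U32.v idx + blen) = [4, 8). *)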
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_includes | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | larger: LowStar.Monotonic.Buffer.ubuffer r a -> smaller: LowStar.Monotonic.Buffer.ubuffer r a
-> Prims.GTot Type0 | {
"end_col": 76,
"end_line": 476,
"start_col": 44,
"start_line": 476
} |
Prims.GTot | val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b) | val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat
let as_addr #_ #_ #_ b = | false | null | false | if g_is_null b then 0 else HS.as_addr (Buffer?.content b) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.bool",
"FStar.Monotonic.HyperStack.as_addr",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content",
"Prims.nat"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_addr (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot nat | [] | LowStar.Monotonic.Buffer.as_addr | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot Prims.nat | {
"end_col": 83,
"end_line": 127,
"start_col": 26,
"start_line": 127
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2) | let ubuffer_disjoint0 (#r1 #r2: HS.rid) (#a1 #a2: nat) (b1: ubuffer r1 a1) (b2: ubuffer r2 a2) = | false | null | false | r1 == r2 /\ a1 == a2 /\ ubuffer_disjoint' (G.reveal b1) (G.reveal b2) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.ubuffer_disjoint'",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.ubuffer'",
"Prims.logical"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
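(* Added illustrative example (not part of the original source), with
   hypothetical field values: the non-empty views
     x1 = { b_max_length = 16; b_offset = 0; b_length = 4; b_is_mm = false }
     x2 = { b_max_length = 16; b_offset = 8; b_length = 4; b_is_mm = false }
   are ubuffer_disjoint', since 0 + 4 <= 8; any view with b_length = 0 is
   disjoint from everything by the first branch. *)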
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_disjoint0 : b1: LowStar.Monotonic.Buffer.ubuffer r1 a1 -> b2: LowStar.Monotonic.Buffer.ubuffer r2 a2
-> Prims.logical | [] | LowStar.Monotonic.Buffer.ubuffer_disjoint0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b1: LowStar.Monotonic.Buffer.ubuffer r1 a1 -> b2: LowStar.Monotonic.Buffer.ubuffer r2 a2
-> Prims.logical | {
"end_col": 47,
"end_line": 522,
"start_col": 2,
"start_line": 521
} |
|
Prims.GTot | val ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset)) | val ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 = | false | null | false | if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/ x2.b_offset + x2.b_length <= x1.b_offset)) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.ubuffer_",
"Prims.op_BarBar",
"Prims.op_Equality",
"Prims.int",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_length",
"Prims.l_True",
"Prims.bool",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_max_length",
"Prims.l_or",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"LowStar.Monotonic.Buffer.__proj__Mkubuffer___item__b_offset"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 | [] | LowStar.Monotonic.Buffer.ubuffer_disjoint' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x1: LowStar.Monotonic.Buffer.ubuffer_ -> x2: LowStar.Monotonic.Buffer.ubuffer_ -> Prims.GTot Type0 | {
"end_col": 47,
"end_line": 517,
"start_col": 2,
"start_line": 512
} |
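A minimal F* sketch (hypothetical lemma name, not part of the record above; it assumes the definitions from the file_context shown): since ubuffer_disjoint' is symmetric in its two arguments, symmetry is dischargeable by the SMT solver directly, mirroring the ubuffer_disjoint_sym proof already in the file.
let ubuffer_disjoint'_sym_sketch (x1 x2: ubuffer_)
  : Lemma (ubuffer_disjoint' x1 x2 <==> ubuffer_disjoint' x2 x1)
  = ()  (* unfolds the if/then/else definition of ubuffer_disjoint'; both directions are symmetric *)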
FStar.Pervasives.Lemma | val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b)))) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else () | val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
let unused_in_equiv #_ #_ #_ b h = | false | null | true | if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.Heap.not_addr_unused_in_nullptr",
"FStar.Map.sel",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap",
"FStar.Monotonic.HyperHeap.root",
"Prims.bool",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unused_in_equiv (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) (h:HS.mem)
:Lemma (unused_in b h <==>
(HS.live_region h (frameOf b) ==> as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b)))) | [] | LowStar.Monotonic.Buffer.unused_in_equiv | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.unused_in b h <==>
FStar.Monotonic.HyperStack.live_region h (LowStar.Monotonic.Buffer.frameOf b) ==>
FStar.Monotonic.Heap.addr_unused_in (LowStar.Monotonic.Buffer.as_addr b)
(FStar.Map.sel (FStar.Monotonic.HyperStack.get_hmap h) (LowStar.Monotonic.Buffer.frameOf b))
) | {
"end_col": 95,
"end_line": 130,
"start_col": 2,
"start_line": 130
} |
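A rough usage sketch (hypothetical name, assuming the surrounding file_context): the lemma recorded above can be instantiated to turn an unused_in fact into an address-level fact when the buffer's frame is a live region.
let unused_in_implies_addr_unused_sketch (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h:HS.mem)
  : Lemma (requires (b `unused_in` h /\ HS.live_region h (frameOf b)))
          (ensures  (as_addr b `Heap.addr_unused_in` (Map.sel (HS.get_hmap h) (frameOf b))))
  = unused_in_equiv b h  (* the <==> plus the liveness hypothesis yields the address-level conclusion *)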
Prims.GTot | val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len | val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t
let len #_ #_ #_ b = | false | null | false | match b with
| Null -> 0ul
| Buffer _ _ _ len -> len | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.__uint_to_t",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val len (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot U32.t | [] | LowStar.Monotonic.Buffer.len | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot FStar.UInt32.t | {
"end_col": 27,
"end_line": 137,
"start_col": 2,
"start_line": 135
} |
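A minimal sketch (hypothetical name, illustration only): len is a ghost machine-integer length, so it can only appear in specifications and other ghost code; its nat view is what the library exposes as length.
let len_as_nat_sketch (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : GTot nat
  = U32.v (len b)  (* ghost length of b as a nat *)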
Prims.GTot | val modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r) | val modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 = | false | null | false | forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre).
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"Prims.l_imp",
"FStar.Monotonic.HyperStack.contains",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperStack.sel"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_0_preserves_mreferences | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h1: FStar.Monotonic.HyperStack.mem -> h2: FStar.Monotonic.HyperStack.mem -> Prims.GTot Type0 | {
"end_col": 75,
"end_line": 558,
"start_col": 2,
"start_line": 557
} |
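A small illustrative property (hypothetical lemma name, assuming the surrounding file_context): the heap-preservation predicate above is reflexive, since every contained reference trivially keeps its own contents.
let modifies_0_preserves_mreferences_refl_sketch (h:HS.mem)
  : Lemma (modifies_0_preserves_mreferences h h)
  = ()  (* for any r contained in h, HS.sel h r == HS.sel h r holds trivially *)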
Prims.GTot | val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len) | val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a)
let as_seq #_ #_ #_ h b = | false | null | false | match b with
| Null -> Seq.empty
| Buffer max_len content idx len -> Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Seq.Base.empty",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"FStar.Seq.Base.slice",
"FStar.Monotonic.HyperStack.sel",
"FStar.Seq.Base.seq"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_seq (#a:Type0) (#rrel #rel:srel a) (h:HS.mem) (b:mbuffer a rrel rel) :GTot (Seq.seq a) | [] | LowStar.Monotonic.Buffer.as_seq | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h: FStar.Monotonic.HyperStack.mem -> b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> Prims.GTot (FStar.Seq.Base.seq a) | {
"end_col": 68,
"end_line": 145,
"start_col": 2,
"start_line": 142
} |
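Illustrative sketch, not taken from the source file: as_seq gives the ghost sequence view of a buffer's contents in a given memory and is the usual vehicle for functional specifications. A minimal postcondition phrased with it might read as follows; the name copy_post and the module abbreviations are assumptions.

module B = LowStar.Monotonic.Buffer
module HS = FStar.HyperStack
module Seq = FStar.Seq

(* Hypothetical spec: after the call, dst in h1 holds what src held in h0. *)
let copy_post (#a:Type0) (#rrel #rel:B.srel a)
    (h0 h1:HS.mem) (src dst:B.mbuffer a rrel rel) : GTot Type0 =
  B.live h1 dst /\
  Seq.equal (B.as_seq h1 dst) (B.as_seq h0 src)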
Prims.GTot | val modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r | val modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 = | false | null | false | forall (r: HS.rid). HS.live_region h1 r ==> HS.live_region h2 r | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Monotonic.HyperHeap.rid",
"Prims.l_imp",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_0_preserves_regions | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h1: FStar.Monotonic.HyperStack.mem -> h2: FStar.Monotonic.HyperStack.mem -> Prims.GTot Type0 | {
"end_col": 66,
"end_line": 561,
"start_col": 2,
"start_line": 561
} |
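Illustrative sketch, not taken from the source file: modifies_0_preserves_regions is one of three low-level preservation predicates in this module, alongside modifies_0_preserves_mreferences (earlier in this dump) and modifies_0_preserves_not_unused_in (next record). Conjoining them yields a single heap-to-heap preservation statement; the name modifies_0_sketch is hypothetical and the library's own combination may differ.

(* Hypothetical conjunction of the three internal predicates -- illustrative only. *)
let modifies_0_sketch (h1 h2: HS.mem) : GTot Type0 =
  modifies_0_preserves_mreferences h1 h2 /\
  modifies_0_preserves_regions h1 h2 /\
  modifies_0_preserves_not_unused_in h1 h2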
Prims.Ghost | val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len) | val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True))
let mgsub #a #rrel #rel sub_rel b i len = | false | null | false | match b with
| Null -> Null
| Buffer max_len content idx length -> Buffer max_len content (U32.add idx i) (Ghost.hide len) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.Null",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"LowStar.Monotonic.Buffer.Buffer",
"FStar.UInt32.add",
"FStar.Ghost.hide"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mgsub (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
(b:mbuffer a rrel rel) (i:U32.t) (len:U32.t)
:Ghost (mbuffer a rrel sub_rel)
(requires (U32.v i + U32.v len <= length b))
(ensures (fun _ -> True)) | [] | LowStar.Monotonic.Buffer.mgsub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
sub_rel: LowStar.Monotonic.Buffer.srel a ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t
-> Prims.Ghost (LowStar.Monotonic.Buffer.mbuffer a rrel sub_rel) | {
"end_col": 59,
"end_line": 155,
"start_col": 2,
"start_line": 152
} |
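Illustrative sketch, not taken from the source file: mgsub builds a ghost sub-buffer, provided the slice [i, i + len) fits inside the buffer and the caller supplies a preorder sub_rel for the result. A minimal wrapper taking a prefix, written as if inside the module's namespace, might look as follows; the name first_part is hypothetical.

(* Hypothetical ghost wrapper around mgsub -- illustrative only. *)
let first_part (#a:Type0) (#rrel #rel:srel a) (sub_rel:srel a)
    (b:mbuffer a rrel rel) (len:U32.t)
  : Ghost (mbuffer a rrel sub_rel)
          (requires U32.v len <= length b)
          (ensures fun _ -> True)
  = mgsub sub_rel b 0ul len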
Prims.GTot | val modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
) | val modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 = | false | null | false | forall (r: HS.rid) (n: nat).
(HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` ((HS.get_hmap h2) `Map.sel` r)) ==>
(n `Heap.addr_unused_in` ((HS.get_hmap h1) `Map.sel` r)) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"Prims.l_imp",
"Prims.l_and",
"Prims.b2t",
"FStar.Monotonic.HyperStack.live_region",
"FStar.Monotonic.Heap.addr_unused_in",
"FStar.Map.sel",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
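(* Note (informal annotation, added for readability): ubuffer_disjoint' regards two
   untyped views as disjoint when either is empty, or when they come from the same
   allocation (equal b_max_length) and their [b_offset, b_offset + b_length) ranges
   do not overlap. *)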
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_0_preserves_not_unused_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h1: FStar.Monotonic.HyperStack.mem -> h2: FStar.Monotonic.HyperStack.mem -> Prims.GTot Type0 | {
"end_col": 3,
"end_line": 569,
"start_col": 2,
"start_line": 564
} |
FStar.Pervasives.Lemma | val lemma_equal_instances_implies_equal_types (a b: Type) (s1: Seq.seq a) (s2: Seq.seq b)
: Lemma (requires s1 === s2) (ensures a == b) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types () | val lemma_equal_instances_implies_equal_types (a b: Type) (s1: Seq.seq a) (s2: Seq.seq b)
: Lemma (requires s1 === s2) (ensures a == b)
let lemma_equal_instances_implies_equal_types (a b: Type) (s1: Seq.seq a) (s2: Seq.seq b)
: Lemma (requires s1 === s2) (ensures a == b) = | false | null | true | Seq.lemma_equal_instances_implies_equal_types () | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.Seq.Base.seq",
"FStar.Seq.Base.lemma_equal_instances_implies_equal_types",
"Prims.unit",
"Prims.op_Equals_Equals_Equals",
"Prims.squash",
"Prims.eq2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_equal_instances_implies_equal_types (a b: Type) (s1: Seq.seq a) (s2: Seq.seq b)
: Lemma (requires s1 === s2) (ensures a == b) | [] | LowStar.Monotonic.Buffer.lemma_equal_instances_implies_equal_types | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> b: Type -> s1: FStar.Seq.Base.seq a -> s2: FStar.Seq.Base.seq b
-> FStar.Pervasives.Lemma (requires s1 === s2) (ensures a == b) | {
"end_col": 52,
"end_line": 207,
"start_col": 4,
"start_line": 207
} |
Prims.GTot | val modifies_0' (h1 h2: HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 | val modifies_0' (h1 h2: HS.mem) : GTot Type0
let modifies_0' (h1 h2: HS.mem) : GTot Type0 = | false | null | false | modifies_0_preserves_mreferences h1 h2 /\ modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies_0_preserves_mreferences",
"LowStar.Monotonic.Buffer.modifies_0_preserves_regions",
"LowStar.Monotonic.Buffer.modifies_0_preserves_not_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
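(* Note (informal annotation, added for readability): ubuffer_preserved' b h h' says
   that any typed buffer b' at the same region and address that is live in h is still
   live in h', and that the portion of b' covered by the untyped view b has equal
   contents in h and h'. *)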
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
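(* Note (informal annotation, added for readability): the three predicates below
   capture a "modifies nothing" footprint: every reference alive in h1 is still alive
   in h2 with the same value, every region live in h1 is still live in h2, and, for
   regions live in both heaps, any address unused in h2 was already unused in h1. *)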
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0' (h1 h2: HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_0' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h1: FStar.Monotonic.HyperStack.mem -> h2: FStar.Monotonic.HyperStack.mem -> Prims.GTot Type0 | {
"end_col": 42,
"end_line": 574,
"start_col": 2,
"start_line": 572
} |
Prims.GTot | val modifies_0 (h1 h2: HS.mem) : GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_0 = modifies_0' | val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = | false | null | false | modifies_0' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.modifies_0'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
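(* Note (informal annotation, added for readability): ubuffer_includes' says the
   larger and smaller views come from the same allocation (equal b_max_length and
   b_is_mm) and that the smaller view's [b_offset, b_offset + b_length) range is
   contained in the larger one's. *)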
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
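(*
 * A small worked instance (illustrative numbers): over the same allocation,
 * views with (b_offset, b_length) = (0, 4) and (4, 8) are disjoint because
 * 0 + 4 <= 4, i.e. their index ranges [0, 4) and [4, 12) do not overlap;
 * any zero-length view is disjoint from everything by the first branch.
 *)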
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
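(*
 * A sketch of the intended reading (the signature below is hypothetical, for
 * illustration only):
 *   val alloc_only (u:unit) : ST unit (requires fun _ -> True)
 *                                     (ensures fun h1 _ h2 -> modifies_0' h1 h2)
 * Such a step may allocate fresh references, but every reference contained in
 * h1 keeps its value in h2, every live region stays live, and, in regions that
 * stay live, no address in use in h1 becomes unused in h2.
 *)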
val modifies_0 (h1 h2: HS.mem) : GTot Type0 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_0 (h1 h2: HS.mem) : GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_0 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | h1: FStar.Monotonic.HyperStack.mem -> h2: FStar.Monotonic.HyperStack.mem -> Prims.GTot Type0 | {
"end_col": 28,
"end_line": 578,
"start_col": 17,
"start_line": 578
} |
FStar.Pervasives.Lemma | val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel | val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b))
let gsub_zero_length #_ #_ #rel b = | false | null | true | lemma_seq_sub_compatilibity_is_reflexive (length b) rel | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatilibity_is_reflexive",
"LowStar.Monotonic.Buffer.length",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
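(*
 * Spelled out: over the full window [0, len) the slice of a sequence is the
 * sequence itself, and (as the assert above records) replacing that whole
 * window of s1 by s2 just yields s2, so rel is compatible with itself on the
 * full window; this is exactly the fact gsub_zero_length relies on.
 *)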
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
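(*
 * Concretely (values are illustrative only): a live buffer is a view into a
 * single mutable reference holding a sequence of max_length elements; the
 * value Buffer 8ul content 2ul (Ghost.hide 3ul) denotes the slice [2, 5) of
 * that sequence, and the refinement on length guarantees that idx + length
 * never exceeds max_length.
 *)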
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
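(*
 * A worked instance of the offset arithmetic behind the lemma above
 * (illustrative numbers): re-slicing a sub-buffer taken at index 3ul for
 * length 5ul, at index 1ul for length 2ul, yields the same view as slicing
 * the original buffer at index 3ul + 1ul = 4ul for length 2ul, since mgsub
 * only shifts idx by the requested offset.
 *)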
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``. | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gsub_zero_length (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
:Lemma (compatible_sub b 0ul (len b) rel /\ b == mgsub rel b 0ul (len b)) | [] | LowStar.Monotonic.Buffer.gsub_zero_length | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.compatible_sub b 0ul (LowStar.Monotonic.Buffer.len b) rel /\
b == LowStar.Monotonic.Buffer.mgsub rel b 0ul (LowStar.Monotonic.Buffer.len b)) | {
"end_col": 91,
"end_line": 196,
"start_col": 36,
"start_line": 196
} |
Prims.Tot | val ubuffer_of_buffer' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Tot (ubuffer (frameOf b) (as_addr b)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
}) | val ubuffer_of_buffer' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Tot (ubuffer (frameOf b) (as_addr b)) = | false | null | false | if Null? b
then Ghost.hide ({ b_max_length = 0; b_offset = 0; b_length = 0; b_is_mm = false })
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b)
}) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"total"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.uu___is_Null",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.ubuffer'",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.Mkubuffer_",
"Prims.bool",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__idx",
"FStar.Ghost.reveal",
"FStar.UInt32.t",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__length",
"FStar.Monotonic.HyperStack.is_mm",
"FStar.Seq.Properties.lseq",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content",
"LowStar.Monotonic.Buffer.ubuffer"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
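(*
 * In words: a null buffer is trivially live, while a non-null buffer is live
 * in h exactly when h contains its underlying reference and the slice
 * preorder rel is compatible with rrel on the window [idx, idx + length);
 * for the full window with rel = rrel this is just the reflexivity shown by
 * lemma_seq_sub_compatilibity_is_reflexive above.
 *)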
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
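(*
 * Note that the sub-buffer shares the underlying reference and max_len; with
 * illustrative values, mgsub sub_rel (Buffer 8ul content 2ul (Ghost.hide 5ul)) 1ul 3ul
 * is Buffer 8ul content 3ul (Ghost.hide 3ul): only idx and the ghost length
 * change.
 *)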
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
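(*
 * A sketch of the correspondence (illustrative values): the erasure
 * ubuffer_of_buffer' defined next maps Buffer 8ul content 2ul (Ghost.hide 3ul),
 * over a reference that is not manually managed, to the record
 *   { b_max_length = 8; b_offset = 2; b_length = 3; b_is_mm = false }
 * so only positions, sizes and the mm flag survive; the element type and the
 * preorders are forgotten.
 *)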
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_of_buffer' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel)
: Tot (ubuffer (frameOf b) (as_addr b)) | [] | LowStar.Monotonic.Buffer.ubuffer_of_buffer' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel
-> LowStar.Monotonic.Buffer.ubuffer (LowStar.Monotonic.Buffer.frameOf b)
(LowStar.Monotonic.Buffer.as_addr b) | {
"end_col": 6,
"end_line": 279,
"start_col": 4,
"start_line": 265
} |
FStar.Pervasives.Lemma | val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf () | val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]]
let live_gsub #_ #rrel #rel _ b i len sub_rel = | false | null | true | match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf ()
: Lemma (requires (buffer_compatible b)) (ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len)
rrel
(U32.v idx)
(U32.v idx + U32.v length)
rel
(U32.v i)
(U32.v i + U32.v len)
sub_rel
in
Classical.move_requires prf () | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"FStar.Classical.move_requires",
"Prims.unit",
"LowStar.Monotonic.Buffer.buffer_compatible",
"LowStar.Monotonic.Buffer.mgsub",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatibility_is_transitive"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b /\ compatible_sub b i len sub_rel))
(ensures (live h b <==> (live h (mgsub sub_rel b i len) /\ (exists h0 . {:pattern (live h0 b)} live h0 b))))
[SMTPatOr [
[SMTPat (live h (mgsub sub_rel b i len))];
[SMTPat (live h b); SMTPat (mgsub sub_rel b i len);]
]] | [] | LowStar.Monotonic.Buffer.live_gsub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
h: FStar.Monotonic.HyperStack.mem ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i + FStar.UInt32.v len <= LowStar.Monotonic.Buffer.length b /\
LowStar.Monotonic.Buffer.compatible_sub b i len sub_rel)
(ensures
LowStar.Monotonic.Buffer.live h b <==>
LowStar.Monotonic.Buffer.live h (LowStar.Monotonic.Buffer.mgsub sub_rel b i len) /\
(exists (h0: FStar.Monotonic.HyperStack.mem). {:pattern LowStar.Monotonic.Buffer.live h0 b}
LowStar.Monotonic.Buffer.live h0 b))
[
SMTPatOr [
[
SMTPat (LowStar.Monotonic.Buffer.live h
(LowStar.Monotonic.Buffer.mgsub sub_rel b i len))
];
[
SMTPat (LowStar.Monotonic.Buffer.live h b);
SMTPat (LowStar.Monotonic.Buffer.mgsub sub_rel b i len)
]
]
] | {
"end_col": 34,
"end_line": 169,
"start_col": 2,
"start_line": 158
} |
FStar.Pervasives.Lemma | val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h | val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2))
let live_same_addresses_equal_types_and_preorders #_ #_ #_ #_ #_ #_ b1 b2 h = | false | null | true | Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Classical.move_requires",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.live",
"Prims.l_not",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Preorder.relation",
"FStar.Seq.Base.seq",
"Prims.l_or",
"FStar.Preorder.preorder_rel",
"LowStar.Monotonic.Buffer.live_same_addresses_equal_types_and_preorders'",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
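(*
 * The key chain in the proof above, spelled out: because b1 and b2 are both
 * live at the same region and address, their underlying references coincide,
 * which forces Seq.seq a1 == Seq.seq a2 (and, via the Heap lemmas, equal
 * preorders); coercing s1 along that equality gives s1' with s1 === s1', and
 * lemma_equal_instances_implies_equal_types turns this heterogeneous equality
 * into a1 == a2.
 *)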
let live_same_addresses_equal_types_and_preorders | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_same_addresses_equal_types_and_preorders
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
((frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\ (~ (g_is_null b1 /\ g_is_null b2))) ==> (a1 == a2 /\ rrel1 == rrel2)) | [] | LowStar.Monotonic.Buffer.live_same_addresses_equal_types_and_preorders | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2 ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.frameOf b1 == LowStar.Monotonic.Buffer.frameOf b2 /\
LowStar.Monotonic.Buffer.as_addr b1 == LowStar.Monotonic.Buffer.as_addr b2 /\
LowStar.Monotonic.Buffer.live h b1 /\ LowStar.Monotonic.Buffer.live h b2 /\
~(LowStar.Monotonic.Buffer.g_is_null b1 /\ LowStar.Monotonic.Buffer.g_is_null b2) ==>
a1 == a2 /\ rrel1 == rrel2) | {
"end_col": 82,
"end_line": 243,
"start_col": 2,
"start_line": 243
} |
Prims.GTot | val modifies_1_preserves_livenesses
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r' | val modifies_1_preserves_livenesses
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0
let modifies_1_preserves_livenesses
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | forall (a': Type) (pre: Preorder.preorder a') (r': HS.mreference a' pre).
h1 `HS.contains` r' ==> h2 `HS.contains` r' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"Prims.l_imp",
"FStar.Monotonic.HyperStack.contains"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise.
 * The cost is that we have to add additional asserts as triggers.
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
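(* Editor's note, not part of the original file: a concrete reading of the
   offset bookkeeping above. With len = 10, an outer window [i1, j1) = [2, 8)
   and an inner window [i2, j2) = [1, 4) taken relative to the outer one, the
   composed absolute window is [i1 + i2, i1 + j2) = [3, 6), which is the range
   appearing in the lemma's conclusion. *)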
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
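(* Editor's note, not part of the original file: as a concrete illustration,
   a non-null buffer carved out of a 16-element allocation at idx = 4ul with
   length = 8ul erases to
   { b_max_length = 16; b_offset = 4; b_length = 8; b_is_mm = ... },
   where b_is_mm records HS.is_mm of the underlying reference. Only this
   arithmetic data is needed to reason about inclusion and disjointness of
   views that share a region and address. *)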
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
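(* Editor's sketch, not part of the original file; the two hypothetical values
   below only illustrate ubuffer_disjoint'. They describe the left and right
   halves of one 8-element allocation, so the first disjunct
   x1.b_offset + x1.b_length <= x2.b_offset holds and the views are disjoint. *)
private let ub_left_half : ubuffer_ = { b_max_length = 8; b_offset = 0; b_length = 4; b_is_mm = false }
private let ub_right_half : ubuffer_ = { b_max_length = 8; b_offset = 4; b_length = 4; b_is_mm = false }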
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
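(* Editor's note, not part of the original file: modifies_0' packages the three
   guarantees of the empty footprint, namely that every previously live
   reference stays live with unchanged contents, that no live region disappears,
   and that no address in use in h1 becomes unused in h2. Per the comment above,
   these predicates only feed the generic modifies clause. *)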
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
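(* Editor's note, not part of the original file: the three predicates above pin
   down what a write to b must leave intact, namely references whose region or
   address differs from b's, and untyped views at b's address that are disjoint
   from b (or from its sub-range [from, to)). The next definition,
   modifies_1_preserves_livenesses, adds that the write frees nothing: every
   reference live in h1 remains live in h2. *)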
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_preserves_livenesses
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1_preserves_livenesses | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 119,
"end_line": 634,
"start_col": 4,
"start_line": 634
} |
Prims.GTot | val modifies_1_from_to_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 | val modifies_1_from_to_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0
let modifies_1_from_to_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | forall (b': ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==>
ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"Prims.l_imp",
"LowStar.Monotonic.Buffer.ubuffer_disjoint",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to",
"LowStar.Monotonic.Buffer.ubuffer_preserved"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_from_to_preserves_ubuffers
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1_from_to_preserves_ubuffers | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
from: FStar.UInt32.t ->
to: FStar.UInt32.t ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 148,
"end_line": 630,
"start_col": 4,
"start_line": 629
} |
FStar.Pervasives.Lemma | val live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\
(~(g_is_null b1 /\ g_is_null b2))) (ensures a1 == a2 /\ rrel1 == rrel2) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1' | val live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\
(~(g_is_null b1 /\ g_is_null b2))) (ensures a1 == a2 /\ rrel1 == rrel2)
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\
(~(g_is_null b1 /\ g_is_null b2))) (ensures a1 == a2 /\ rrel1 == rrel2) = | false | null | true | Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1:Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1':Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.lemma_equal_instances_implies_equal_types",
"Prims.unit",
"Prims._assert",
"Prims.op_Equals_Equals_Equals",
"FStar.Seq.Base.seq",
"FStar.Pervasives.coerce_eq",
"Prims.eq2",
"LowStar.Monotonic.Buffer.as_seq",
"FStar.Monotonic.Heap.lemma_distinct_addrs_distinct_mm",
"FStar.Monotonic.Heap.lemma_distinct_addrs_distinct_preorders",
"Prims.l_and",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.nat",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.live",
"Prims.l_not",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null",
"Prims.squash",
"FStar.Preorder.relation",
"Prims.l_or",
"FStar.Preorder.preorder_rel",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\ | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\ live h b1 /\ live h b2 /\
(~(g_is_null b1 /\ g_is_null b2))) (ensures a1 == a2 /\ rrel1 == rrel2) | [] | LowStar.Monotonic.Buffer.live_same_addresses_equal_types_and_preorders' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b1: LowStar.Monotonic.Buffer.mbuffer a1 rrel1 rel1 ->
b2: LowStar.Monotonic.Buffer.mbuffer a2 rrel2 rel2 ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.frameOf b1 == LowStar.Monotonic.Buffer.frameOf b2 /\
LowStar.Monotonic.Buffer.as_addr b1 == LowStar.Monotonic.Buffer.as_addr b2 /\
LowStar.Monotonic.Buffer.live h b1 /\ LowStar.Monotonic.Buffer.live h b2 /\
~(LowStar.Monotonic.Buffer.g_is_null b1 /\ LowStar.Monotonic.Buffer.g_is_null b2))
(ensures a1 == a2 /\ rrel1 == rrel2) | {
"end_col": 58,
"end_line": 239,
"start_col": 4,
"start_line": 233
} |
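As an illustrative sketch of how the lemma in this row can be used (the helper name example_types_agree is hypothetical, not part of LowStar, and the one-line proof may need extra SMT hints), a client inside the module could recover just the type equality:
let example_types_agree (#a1 #a2:Type0) (#rrel1 #rel1:srel a1) (#rrel2 #rel2:srel a2)
  (b1:mbuffer a1 rrel1 rel1) (b2:mbuffer a2 rrel2 rel2) (h:HS.mem)
  : Lemma (requires frameOf b1 == frameOf b2 /\ as_addr b1 == as_addr b2 /\
                    live h b1 /\ live h b2 /\ ~(g_is_null b1 /\ g_is_null b2))
          (ensures a1 == a2)
  = (* directly apply the row's lemma; its postcondition also yields rrel1 == rrel2 *)
    live_same_addresses_equal_types_and_preorders' b1 b2 h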
FStar.Pervasives.Lemma | val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len) | val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))]
let as_seq_gsub #_ #_ #_ h b i len _ = | false | null | true | match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content)
(U32.v idx)
(U32.v idx + U32.v len0)
(U32.v i)
(U32.v i + U32.v len) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"FStar.Seq.Properties.slice_slice",
"FStar.Monotonic.HyperStack.sel",
"Prims.unit"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_seq_gsub (#a:Type0) (#rrel #rel:srel a)
(h:HS.mem) (b:mbuffer a rrel rel) (i:U32.t) (len:U32.t) (sub_rel:srel a)
:Lemma (requires (U32.v i + U32.v len <= length b))
(ensures (as_seq h (mgsub sub_rel b i len) == Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len)))
[SMTPat (as_seq h (mgsub sub_rel b i len))] | [] | LowStar.Monotonic.Buffer.as_seq_gsub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
h: FStar.Monotonic.HyperStack.mem ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i: FStar.UInt32.t ->
len: FStar.UInt32.t ->
sub_rel: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires FStar.UInt32.v i + FStar.UInt32.v len <= LowStar.Monotonic.Buffer.length b)
(ensures
LowStar.Monotonic.Buffer.as_seq h (LowStar.Monotonic.Buffer.mgsub sub_rel b i len) ==
FStar.Seq.Base.slice (LowStar.Monotonic.Buffer.as_seq h b)
(FStar.UInt32.v i)
(FStar.UInt32.v i + FStar.UInt32.v len))
[SMTPat (LowStar.Monotonic.Buffer.as_seq h (LowStar.Monotonic.Buffer.mgsub sub_rel b i len))] | {
"end_col": 107,
"end_line": 202,
"start_col": 2,
"start_line": 199
} |
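A small usage sketch for the as_seq_gsub lemma in this row (the helper name example_sub_as_slice is hypothetical and not part of the original file): reading a sub-buffer built with mgsub gives exactly the matching slice of the parent buffer's contents.
let example_sub_as_slice (#a:Type0) (#rrel #rel:srel a)
  (h:HS.mem) (b:mbuffer a rrel rel) (i len:U32.t) (sub_rel:srel a)
  : Lemma (requires U32.v i + U32.v len <= length b)
          (ensures as_seq h (mgsub sub_rel b i len) ==
                   Seq.slice (as_seq h b) (U32.v i) (U32.v i + U32.v len))
  = (* the SMTPat on as_seq_gsub would usually fire automatically; the explicit call makes the step visible *)
    as_seq_gsub h b i len sub_rel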
Prims.GTot | val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1 = modifies_1' | val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = | false | null | false | modifies_1' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.modifies_1'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1 | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 28,
"end_line": 646,
"start_col": 17,
"start_line": 646
} |
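An illustrative sketch of what the definition modifies_1 = modifies_1' buys a client of this module (the helper name example_modifies_1_elim is hypothetical, and provability of the trivial proof is an assumption that may require unfolding hints): the abbreviation can be eliminated to expose its preservation clauses.
let example_modifies_1_elim (#a:Type0) (#rrel #rel:srel a)
  (b:mbuffer a rrel rel) (h1 h2:HS.mem)
  : Lemma (requires modifies_1 b h1 h2)
          (ensures modifies_0_preserves_regions h1 h2 /\
                   modifies_1_preserves_mreferences b h1 h2 /\
                   modifies_1_preserves_ubuffers b h1 h2)
  = (* modifies_1 is definitionally modifies_1', so the clauses follow by unfolding *)
    ()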
Prims.GTot | val modifies_1' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2 | val modifies_1' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0
let modifies_1' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 = | false | null | false | modifies_0_preserves_regions h1 h2 /\ modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\ modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies_0_preserves_regions",
"LowStar.Monotonic.Buffer.modifies_1_preserves_mreferences",
"LowStar.Monotonic.Buffer.modifies_1_preserves_livenesses",
"LowStar.Monotonic.Buffer.modifies_0_preserves_not_unused_in",
"LowStar.Monotonic.Buffer.modifies_1_preserves_ubuffers"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
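(* Worked instance (hypothetical numbers, for illustration): for a non-null buffer with
   idx = 8 and length = 24, the untyped view has b_offset = 8 and b_length = 24;
   restricting it with from = 4 and to = 12 yields b_offset = 12 and b_length = 8.
   A null buffer, an interval with to < from, or a from beyond the length collapses to
   the empty view above. *)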
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
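(* For instance (hypothetical values): over the same allocation, a view with b_offset = 8
   and b_length = 16 includes the smaller view with b_offset = 12 and b_length = 4,
   since 8 <= 12 and 12 + 4 <= 8 + 16. *)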
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
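(* Illustrative sketch (hypothetical values): two non-overlapping views of the same
   64-element allocation are disjoint, since 0 + 16 <= 32. *)
let _example_disjoint : squash (ubuffer_disjoint'
  ({ b_max_length = 64; b_offset = 0; b_length = 16; b_is_mm = false })
  ({ b_max_length = 64; b_offset = 32; b_length = 16; b_is_mm = false }))
  = ()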
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
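(* Informal reading (sketch): modifies_0 h1 h2 allows only allocation effects between h1
   and h2: every mreference contained in h1 is still contained in h2 with the same value,
   no live region disappears, and no address in use in h1 becomes unused in h2; fresh
   allocations in h2 remain possible. *)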
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
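(* Informal reading (sketch): modifies_1' combines the clauses above: regions and
   liveness are preserved, mreferences at other addresses keep their values, no used
   address is freed, and every untyped view disjoint from b's view is preserved, so only
   the footprint of b may change. *)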
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 41,
"end_line": 642,
"start_col": 4,
"start_line": 638
} |
Prims.GTot | val modifies_1_from_to
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2 | val modifies_1_from_to
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0
let modifies_1_from_to
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\ modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\ modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer_from_to_none_cond",
"LowStar.Monotonic.Buffer.modifies_0",
"Prims.bool",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies_0_preserves_regions",
"LowStar.Monotonic.Buffer.modifies_1_preserves_mreferences",
"LowStar.Monotonic.Buffer.modifies_1_preserves_livenesses",
"LowStar.Monotonic.Buffer.modifies_0_preserves_not_unused_in",
"LowStar.Monotonic.Buffer.modifies_1_from_to_preserves_ubuffers"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_from_to
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(from to: U32.t)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1_from_to | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
from: FStar.UInt32.t ->
to: FStar.UInt32.t ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 59,
"end_line": 657,
"start_col": 4,
"start_line": 650
} |
FStar.Pervasives.Lemma | val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
) | val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f = | false | null | true | ubuffer_preserved_intro b
h1
h2
(fun t' _ _ b' -> if Null? b' then () else f _ _ (Buffer?.content b'))
(fun t' _ _ b' -> if Null? b' then () else f _ _ (Buffer?.content b')) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"FStar.Monotonic.HyperStack.mem",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"Prims.unit",
"Prims.l_and",
"FStar.Monotonic.HyperStack.contains",
"Prims.eq2",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThan",
"Prims.op_GreaterThanOrEqual",
"FStar.Monotonic.HyperStack.as_addr",
"Prims.squash",
"FStar.Monotonic.HyperStack.sel",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.Monotonic.Buffer.ubuffer_preserved_intro",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.uu___is_Null",
"Prims.bool",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2) | [] | LowStar.Monotonic.Buffer.same_mreference_ubuffer_preserved | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.ubuffer r a ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem ->
f:
(
a': Type0 ->
pre: FStar.Preorder.preorder a' ->
r': FStar.Monotonic.HyperStack.mreference a' pre
-> FStar.Pervasives.Lemma
(requires
FStar.Monotonic.HyperStack.contains h1 r' /\
r == FStar.Monotonic.HyperStack.frameOf r' /\
a == FStar.Monotonic.HyperStack.as_addr r')
(ensures
FStar.Monotonic.HyperStack.contains h2 r' /\
FStar.Monotonic.HyperStack.sel h1 r' == FStar.Monotonic.HyperStack.sel h2 r'))
-> FStar.Pervasives.Lemma (ensures LowStar.Monotonic.Buffer.ubuffer_preserved b h1 h2) | {
"end_col": 3,
"end_line": 404,
"start_col": 2,
"start_line": 392
} |
FStar.Pervasives.Lemma | val unused_in_ubuffer_preserved
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h h': HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h')) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' | val unused_in_ubuffer_preserved
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h h': HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
let unused_in_ubuffer_preserved
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h h': HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h')) = | false | null | true | Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.addr_unused_in_ubuffer_preserved",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"Prims.unit",
"LowStar.Monotonic.Buffer.unused_in_equiv",
"LowStar.Monotonic.Buffer.null_unique",
"LowStar.Monotonic.Buffer.live_null",
"FStar.Classical.move_requires",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.unused_in",
"Prims.l_False",
"LowStar.Monotonic.Buffer.live_not_unused_in",
"Prims.squash",
"LowStar.Monotonic.Buffer.ubuffer_preserved",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
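(*
 * An illustrative instance (a sketch, not used below): with the hypothetical trivial
 * preorder `triv (#a:Type0) : srel a = fun _ _ -> True` for both `rel` and `sub_rel`,
 * compatibility holds at any sub-range, e.g. `compatible_sub_preorder 4 triv 1 3 triv`,
 * since both conjuncts of `compatible_subseq_preorder` then have trivially true conclusions.
 *)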
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
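(*
 * Informal reading: `ubuffer_preserved b h h'` says that any typed buffer at the same
 * region and address remains live from `h` to `h'`, and that whenever such a buffer's
 * range covers the range recorded in `b`, the contents of that covered slice are
 * unchanged between `h` and `h'`.
 *)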
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unused_in_ubuffer_preserved
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h h': HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h')) | [] | LowStar.Monotonic.Buffer.unused_in_ubuffer_preserved | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.unused_in b h)
(ensures
LowStar.Monotonic.Buffer.ubuffer_preserved (LowStar.Monotonic.Buffer.ubuffer_of_buffer b)
h
h') | {
"end_col": 89,
"end_line": 462,
"start_col": 4,
"start_line": 458
} |
Prims.GTot | val modifies_addr_of' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2 | val modifies_addr_of' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0
let modifies_addr_of' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 = | false | null | false | modifies_0_preserves_regions h1 h2 /\ modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2 | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_and",
"LowStar.Monotonic.Buffer.modifies_0_preserves_regions",
"LowStar.Monotonic.Buffer.modifies_1_preserves_mreferences",
"LowStar.Monotonic.Buffer.modifies_addr_of_preserves_not_unused_in"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
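(*
 * A concrete illustration (sketch only): over the same backing array, a view with
 * b_offset = 0 and b_length = 8 includes one with b_offset = 2 and b_length = 4,
 * since 0 <= 2 and 2 + 4 <= 0 + 8 (assuming equal b_max_length and b_is_mm flags).
 *)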
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
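(*
 * A concrete illustration (sketch only): two non-empty views over the same backing
 * array with ranges [0, 4) and [4, 8) are disjoint, since 0 + 4 <= 4; any zero-length
 * view is disjoint from everything by the first branch.
 *)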
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_addr_of' (#a: Type0) (#rrel #rel: srel a) (b: mbuffer a rrel rel) (h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_addr_of' | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 50,
"end_line": 738,
"start_col": 2,
"start_line": 736
} |
Prims.GTot | val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_addr_of = modifies_addr_of' | val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = | false | null | false | modifies_addr_of' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.modifies_addr_of'"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
 * The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
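(* A ghost sub-buffer shares the underlying content reference and only shifts
   the start index by i, recording the new length len. *)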
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
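(*
 * ubuffer_preserved b h h' states that, for any typed buffer b' at the same
 * region and address, liveness in h implies liveness in h', and the slice of
 * b' covered by the untyped footprint b keeps its contents from h to h'.
 *)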
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
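(* The _from_to variants restrict the untyped footprint of b to the element
   range [from, to); a null buffer or an ill-formed range yields an empty
   footprint. *)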
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
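(* Inclusion of untyped buffers: both views come from the same underlying
   reference (same maximal length and memory-management flag) and the smaller
   index range is contained in the larger one. *)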
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
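(* Disjointness of untyped buffers: an empty footprint is disjoint from
   everything; otherwise both views must come from a reference of the same
   maximal length and their index ranges must not overlap. *)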
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
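(* modifies_0 h1 h2: the step from h1 to h2 may allocate fresh references, but
   it mutates no existing reference, keeps every live region live, and
   deallocates nothing. *)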
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
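(* modifies_1 b h1 h2: only the footprint of b may change; references at other
   addresses keep their values, no liveness is lost, and every untyped buffer
   disjoint from b's footprint is preserved. *)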
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
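(* modifies_addr_of b h1 h2: like modifies_1 for references at other addresses,
   but liveness and allocation at the address of b itself are unconstrained, so
   b may be deallocated (e.g. when it is freed). *)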
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2 | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_addr_of | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 40,
"end_line": 741,
"start_col": 23,
"start_line": 741
} |
FStar.Pervasives.Lemma | val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)] | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf () | val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)]
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 = | false | null | true | let prf ()
: Lemma
(requires
(compatible_sub b i1 len1 sub_rel1 /\
compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2)) =
lemma_seq_sub_compatibility_is_transitive (length b)
rel
(U32.v i1)
(U32.v i1 + U32.v len1)
sub_rel1
(U32.v i2)
(U32.v i2 + U32.v len2)
sub_rel2
in
Classical.move_requires prf () | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.UInt32.t",
"FStar.Classical.move_requires",
"Prims.unit",
"Prims.l_and",
"LowStar.Monotonic.Buffer.compatible_sub",
"LowStar.Monotonic.Buffer.mgsub",
"FStar.UInt32.add",
"Prims.l_Forall",
"FStar.Seq.Base.seq",
"Prims.l_imp",
"Prims.eq2",
"Prims.nat",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.length",
"FStar.Seq.Base.slice",
"FStar.UInt32.v",
"Prims.op_Addition",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Properties.replace_subseq",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.Monotonic.Buffer.lemma_seq_sub_compatibility_is_transitive"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = () | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gsub_gsub (#a:Type0) (#rrel #rel:srel a)
(b:mbuffer a rrel rel)
(i1:U32.t) (len1:U32.t) (sub_rel1:srel a)
(i2: U32.t) (len2: U32.t) (sub_rel2:srel a)
:Lemma (requires (U32.v i1 + U32.v len1 <= length b /\
U32.v i2 + U32.v len2 <= U32.v len1))
(ensures (((compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2) ==> compatible_sub b (U32.add i1 i2) len2 sub_rel2) /\
mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2 == mgsub sub_rel2 b (U32.add i1 i2) len2))
[SMTPat (mgsub sub_rel2 (mgsub sub_rel1 b i1 len1) i2 len2)] | [] | LowStar.Monotonic.Buffer.gsub_gsub | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
i1: FStar.UInt32.t ->
len1: FStar.UInt32.t ->
sub_rel1: LowStar.Monotonic.Buffer.srel a ->
i2: FStar.UInt32.t ->
len2: FStar.UInt32.t ->
sub_rel2: LowStar.Monotonic.Buffer.srel a
-> FStar.Pervasives.Lemma
(requires
FStar.UInt32.v i1 + FStar.UInt32.v len1 <= LowStar.Monotonic.Buffer.length b /\
FStar.UInt32.v i2 + FStar.UInt32.v len2 <= FStar.UInt32.v len1)
(ensures
(LowStar.Monotonic.Buffer.compatible_sub b i1 len1 sub_rel1 /\
LowStar.Monotonic.Buffer.compatible_sub (LowStar.Monotonic.Buffer.mgsub sub_rel1 b i1 len1
)
i2
len2
sub_rel2 ==>
LowStar.Monotonic.Buffer.compatible_sub b (FStar.UInt32.add i1 i2) len2 sub_rel2) /\
LowStar.Monotonic.Buffer.mgsub sub_rel2
(LowStar.Monotonic.Buffer.mgsub sub_rel1 b i1 len1)
i2
len2 ==
LowStar.Monotonic.Buffer.mgsub sub_rel2 b (FStar.UInt32.add i1 i2) len2)
[
SMTPat (LowStar.Monotonic.Buffer.mgsub sub_rel2
(LowStar.Monotonic.Buffer.mgsub sub_rel1 b i1 len1)
i2
len2)
] | {
"end_col": 32,
"end_line": 190,
"start_col": 62,
"start_line": 182
} |
FStar.Pervasives.Lemma | val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b)) | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b) | val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f = | false | null | true | if Null? b then () else f _ _ (Buffer?.content b) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"LowStar.Monotonic.Buffer.srel",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Monotonic.HyperStack.frameOf",
"LowStar.Monotonic.Buffer.frameOf",
"Prims.int",
"Prims.l_or",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"FStar.Monotonic.HyperStack.as_addr",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.Monotonic.HyperStack.contains",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowStar.Monotonic.Buffer.uu___is_Null",
"Prims.bool",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__max_length",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"LowStar.Monotonic.Buffer.__proj__Buffer__item__content"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b)) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b)) | [] | LowStar.Monotonic.Buffer.liveness_preservation_intro | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
f:
(
t': Type0 ->
pre: FStar.Preorder.preorder t' ->
r: FStar.Monotonic.HyperStack.mreference t' pre
-> FStar.Pervasives.Lemma
(requires
FStar.Monotonic.HyperStack.frameOf r == LowStar.Monotonic.Buffer.frameOf b /\
FStar.Monotonic.HyperStack.as_addr r == LowStar.Monotonic.Buffer.as_addr b /\
FStar.Monotonic.HyperStack.contains h r)
(ensures FStar.Monotonic.HyperStack.contains h' r))
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.live h b)
(ensures LowStar.Monotonic.Buffer.live h' b) | {
"end_col": 32,
"end_line": 552,
"start_col": 2,
"start_line": 550
} |
Prims.GTot | val loc_union
(s1 s2: loc)
: GTot loc | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_union = MG.loc_union | val loc_union
(s1 s2: loc)
: GTot loc
let loc_union = | false | null | false | MG.loc_union | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.ModifiesGen.loc_union",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
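(* Editorial aside, not part of the original file: intuitively,
   compatible_sub_preorder len rel i j sub_rel asks that the two preorders
   agree on the [i, j) window: a rel-step on the whole length-len sequence
   restricts to a sub_rel-step on the slice, and a sub_rel-step on the slice
   can be pushed back through Seq.replace_subseq into a rel-step on the whole
   sequence. Both directions are exercised by the transitivity proof below. *)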
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
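(* Editorial aside, not part of the original file: a worked instance of the
   clamping above, with made-up numbers. For a live buffer of length 8 whose
   base view starts at b_offset = 2, taking from = 3 and to = 12 clamps to'
   to 8, so the result has b_offset = 2 + 3 = 5 and b_length = 8 - 3 = 5.
   If to < from or from > length b, ubuffer_of_buffer_from_to_none_cond holds
   and the empty view is returned instead. *)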
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_union
(s1 s2: loc)
: GTot loc | [] | LowStar.Monotonic.Buffer.loc_union | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc
-> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 28,
"end_line": 785,
"start_col": 16,
"start_line": 785
} |
Prims.GTot | val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) | val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc
let loc_buffer #_ #_ #_ b = | false | null | false | if g_is_null b
then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.ModifiesGen.loc_none",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls",
"Prims.bool",
"FStar.ModifiesGen.loc_of_aloc",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.ubuffer_of_buffer",
"LowStar.Monotonic.Buffer.loc"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly; the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606; now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
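(* Added note (not part of the original file): [modifies_1_from_to b from to] below is the
   range-restricted variant of [modifies_1]: only the interval [from, to) of [b] may change,
   while disjoint ubuffers, other mreferences, regions and liveness are preserved. When [b]
   is null, [to < from], or [from] lies past [length b] (the none_cond case), it degenerates
   to [modifies_0]. *)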
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
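(* Added note (not part of the original file): the class instance below packages
   ubuffer_includes (reflexive, transitive), ubuffer_disjoint (symmetric, stable under
   inclusion) and ubuffer_preserved (reflexive, transitive, implied by same-mreference
   preservation) as an abstract-location class for FStar.ModifiesGen; loc, loc_none,
   loc_union and their laws are then inherited from that generic theory. *)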
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
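(* Hypothetical usage sketch (not part of the original file): footprints are combined with
   loc_union and simplified with the laws re-exported above. For assumed locations
   l1 l2 : loc,
     loc_union l1 (loc_union l2 loc_none)
   reduces to loc_union l1 l2 by loc_union_loc_none_r. *)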
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_buffer (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel) :GTot loc | [] | LowStar.Monotonic.Buffer.loc_buffer | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | b: LowStar.Monotonic.Buffer.mbuffer a rrel rel -> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 75,
"end_line": 806,
"start_col": 2,
"start_line": 805
} |
FStar.Pervasives.Lemma | val ubuffer_preserved_intro
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h': HS.mem)
(f0:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma (requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))))
(f:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma
(requires
(frameOf b' == r /\ as_addr b' == a /\ live h b' /\ live h' b' /\ Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len))))
(ensures
(Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b')
(boff - U32.v idx)
(boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)))))
))
: Lemma (ubuffer_preserved b h h') | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
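(* Added proof note (not part of the original file): the liveness hypothesis f0 and the
   contents hypothesis f are packaged, via Classical.move_requires, into a single quantified
   auxiliary lemma g' below; the universal quantification over t', rrel, rel and b' is then
   closed with Classical.forall_intro_4, which yields ubuffer_preserved b h h'. *)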
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g' | val ubuffer_preserved_intro
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h': HS.mem)
(f0:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma (requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))))
(f:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma
(requires
(frameOf b' == r /\ as_addr b' == a /\ live h b' /\ live h' b' /\ Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len))))
(ensures
(Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b')
(boff - U32.v idx)
(boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)))))
))
: Lemma (ubuffer_preserved b h h')
let ubuffer_preserved_intro
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h': HS.mem)
(f0:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma (requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))))
(f:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma
(requires
(frameOf b' == r /\ as_addr b' == a /\ live h b' /\ live h' b' /\ Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len))))
(ensures
(Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b')
(boff - U32.v idx)
(boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)))))
))
: Lemma (ubuffer_preserved b h h') = | false | null | true | let g' (t': Type0) (rrel rel: srel t') (b': mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==>
((live h b' ==> live h' b') /\
(((live h b' /\ live h' b' /\ Buffer? b') ==>
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } = Ghost.reveal b in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\ boff + blen <= U32.v idx + U32.v len) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))))))) =
Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g' | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"LowStar.Monotonic.Buffer.ubuffer",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.unit",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"LowStar.Monotonic.Buffer.live",
"Prims.squash",
"Prims.Nil",
"FStar.Pervasives.pattern",
"Prims.b2t",
"LowStar.Monotonic.Buffer.uu___is_Buffer",
"Prims.bool",
"FStar.UInt32.t",
"FStar.HyperStack.ST.mreference",
"FStar.Seq.Properties.lseq",
"FStar.UInt32.v",
"LowStar.Monotonic.Buffer.srel_to_lsrel",
"FStar.Ghost.erased",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Ghost.reveal",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"Prims.logical",
"LowStar.Monotonic.Buffer.ubuffer'",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.op_Subtraction",
"FStar.Classical.forall_intro_4",
"Prims.l_imp",
"FStar.Preorder.preorder",
"FStar.Seq.Base.seq",
"Prims.l_True",
"FStar.Classical.move_requires",
"LowStar.Monotonic.Buffer.ubuffer_preserved"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
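(* Added note (not part of the original file): a buffer is either Null or a view
   [Buffer max_length content idx length] into a single mreference [content] holding a
   sequence of [max_length] elements; [idx] is the start offset of the view and the ghost
   [length] its extent, with idx + length <= max_length enforced by the refinement on
   [length]. *)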
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
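(* Added note (not part of the original file): liveness below requires both that the
   underlying mreference is contained in the heap and that the buffer is compatible, i.e.
   the sub-preorder rel agrees with the root preorder rrel on the selected range
   (buffer_compatible). *)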
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ubuffer_preserved_intro
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h': HS.mem)
(f0:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma (requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))))
(f:
(t': Type0 -> rrel: srel t' -> rel: srel t' -> b': mbuffer t' rrel rel
-> Lemma
(requires
(frameOf b' == r /\ as_addr b' == a /\ live h b' /\ live h' b' /\ Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
(U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len))))
(ensures
(Buffer? b' /\
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen } =
Ghost.reveal b
in
let Buffer max _ idx len = b' in
U32.v max == bmax /\ U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b')
(boff - U32.v idx)
(boff - U32.v idx + blen))
(Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen)))))
))
: Lemma (ubuffer_preserved b h h') | [] | LowStar.Monotonic.Buffer.ubuffer_preserved_intro | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.ubuffer r a ->
h: FStar.Monotonic.HyperStack.mem ->
h': FStar.Monotonic.HyperStack.mem ->
f0:
(
t': Type0 ->
rrel: LowStar.Monotonic.Buffer.srel t' ->
rel: LowStar.Monotonic.Buffer.srel t' ->
b': LowStar.Monotonic.Buffer.mbuffer t' rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.frameOf b' == r /\ LowStar.Monotonic.Buffer.as_addr b' == a /\
LowStar.Monotonic.Buffer.live h b') (ensures LowStar.Monotonic.Buffer.live h' b')) ->
f:
(
t': Type0 ->
rrel: LowStar.Monotonic.Buffer.srel t' ->
rel: LowStar.Monotonic.Buffer.srel t' ->
b': LowStar.Monotonic.Buffer.mbuffer t' rrel rel
-> FStar.Pervasives.Lemma
(requires
LowStar.Monotonic.Buffer.frameOf b' == r /\ LowStar.Monotonic.Buffer.as_addr b' == a /\
LowStar.Monotonic.Buffer.live h b' /\ LowStar.Monotonic.Buffer.live h' b' /\
Buffer? b' /\
(let _ = FStar.Ghost.reveal b in
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen ; b_is_mm = _ } =
_
in
let _ = b' in
(let LowStar.Monotonic.Buffer.Buffer #_ #_ #_ max _ idx len = _ in
FStar.UInt32.v max == bmax /\ FStar.UInt32.v idx <= boff /\
boff + blen <= FStar.UInt32.v idx + FStar.UInt32.v (FStar.Ghost.reveal len))
<:
Prims.logical)
<:
Prims.logical))
(ensures
Buffer? b' /\
(let _ = FStar.Ghost.reveal b in
(let { b_max_length = bmax ; b_offset = boff ; b_length = blen ; b_is_mm = _ } =
_
in
let _ = b' in
(let LowStar.Monotonic.Buffer.Buffer #_ #_ #_ max _ idx len = _ in
FStar.UInt32.v max == bmax /\ FStar.UInt32.v idx <= boff /\
boff + blen <= FStar.UInt32.v idx + FStar.UInt32.v (FStar.Ghost.reveal len) /\
FStar.Seq.Base.equal (FStar.Seq.Base.slice (LowStar.Monotonic.Buffer.as_seq h
b')
(boff - FStar.UInt32.v idx)
(boff - FStar.UInt32.v idx + blen))
(FStar.Seq.Base.slice (LowStar.Monotonic.Buffer.as_seq h' b')
(boff - FStar.UInt32.v idx)
(boff - FStar.UInt32.v idx + blen)))
<:
Prims.logical)
<:
Prims.logical)))
-> FStar.Pervasives.Lemma (ensures LowStar.Monotonic.Buffer.ubuffer_preserved b h h') | {
"end_col": 29,
"end_line": 362,
"start_col": 1,
"start_line": 344
} |
Prims.GTot | val modifies_1_preserves_mreferences
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r') | val modifies_1_preserves_mreferences
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0
let modifies_1_preserves_mreferences
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | forall (a': Type) (pre: Preorder.preorder a') (r': HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r') | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Preorder.preorder",
"FStar.Monotonic.HyperStack.mreference",
"Prims.l_imp",
"Prims.l_and",
"Prims.l_or",
"Prims.b2t",
"Prims.op_disEquality",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Monotonic.HyperStack.frameOf",
"Prims.int",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.Monotonic.HyperStack.as_addr",
"FStar.Monotonic.HyperStack.contains",
"Prims.eq2",
"FStar.Monotonic.HyperStack.sel"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
 * The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
 * maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
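(* Illustrative sketch, not part of the original file (the name modifies_0_refl
 * is hypothetical): modifies_0 is reflexive, since each of its three conjuncts
 * holds trivially when both heaps coincide. *)
let modifies_0_refl (h: HS.mem) : Lemma (modifies_0 h h) = ()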
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_1_preserves_mreferences
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_1_preserves_mreferences | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 59,
"end_line": 620,
"start_col": 4,
"start_line": 618
} |
Prims.GTot | val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_regions = MG.loc_regions | val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc
let loc_regions = | false | null | false | MG.loc_regions | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.ModifiesGen.loc_regions",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
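(* Illustrative sketch, not part of the original file (the example_* names are
 * hypothetical): with the trivial preorder that relates any two sequences,
 * every sub-range is compatible, since both proof obligations reduce to True. *)
let example_trivial_srel (a:Type0) : srel a = fun (s1 s2:Seq.seq a) -> True
let example_compatible_sub_trivial (a:Type0) (len:nat) (i:nat) (j:nat{i <= j /\ j <= len})
  : Lemma (compatible_sub_preorder len (example_trivial_srel a) i j (example_trivial_srel a))
  = ()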
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
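(* Illustrative sketch, not part of the original file (example_ubuffer_ is a
 * hypothetical name): an untyped view describing 4 elements starting at
 * offset 2 of an 8-element, non-manually-managed allocation. *)
let example_ubuffer_ : ubuffer_ =
  { b_max_length = 8; b_offset = 2; b_length = 4; b_is_mm = false }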
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
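(* Illustrative usage sketch, not part of the original file (the example_* name
 * is hypothetical): the algebraic laws inherited from FStar.ModifiesGen can be
 * invoked directly, e.g. loc_none is a left unit for loc_union. *)
let example_loc_union_none_l (s: loc) : Lemma (loc_union loc_none s == s)
  = loc_union_loc_none_l s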
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_regions
(preserve_liveness: bool)
(r: Set.set HS.rid)
: GTot loc | [] | LowStar.Monotonic.Buffer.loc_regions | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | preserve_liveness: Prims.bool -> r: FStar.Set.set FStar.Monotonic.HyperHeap.rid
-> Prims.GTot LowStar.Monotonic.Buffer.loc | {
"end_col": 32,
"end_line": 826,
"start_col": 18,
"start_line": 826
} |
Prims.GTot | val loc_includes
(s1 s2: loc)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.ModifiesGen",
"short_module": "MG"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let loc_includes = MG.loc_includes | val loc_includes
(s1 s2: loc)
: GTot Type0
let loc_includes = | false | null | false | MG.loc_includes | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"FStar.ModifiesGen.loc_includes",
"LowStar.Monotonic.Buffer.ubuffer",
"LowStar.Monotonic.Buffer.cls"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to add additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
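(* Illustrative sketch, not part of the original file (the example_* name is
 * hypothetical): the compatibility fact underlying gsub_zero_length, restated
 * as a standalone lemma and discharged by the reflexivity lemma above. *)
let example_full_range_compatible (#a:Type0) (#rrel #rel:srel a) (b:mbuffer a rrel rel)
  : Lemma (compatible_sub_preorder (length b) rel 0 (length b) rel)
  = lemma_seq_sub_compatilibity_is_reflexive (length b) rel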
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* maybe take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r))
let modifies_addr_of' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0 =
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_addr_of_preserves_not_unused_in b h1 h2
val modifies_addr_of (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_addr_of = modifies_addr_of'
val modifies_addr_of_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_addr_of b h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_addr_of_live_region #_ #_ #_ _ _ _ _ = ()
val modifies_addr_of_mreference (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
: Lemma (requires (modifies_addr_of b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_addr_of_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
val modifies_addr_of_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
: Lemma (requires (modifies_addr_of b h1 h2 /\
(r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_addr_of_unused_in #_ #_ #_ _ _ _ _ _ = ()
module MG = FStar.ModifiesGen
let cls : MG.cls ubuffer = MG.Cls #ubuffer
ubuffer_includes
(fun #r #a x -> ubuffer_includes_refl x)
(fun #r #a x1 x2 x3 -> ubuffer_includes_trans x1 x2 x3)
ubuffer_disjoint
(fun #r #a x1 x2 -> ubuffer_disjoint_sym x1 x2)
(fun #r #a larger1 larger2 smaller1 smaller2 -> ubuffer_disjoint_includes larger1 larger2 smaller1 smaller2)
ubuffer_preserved
(fun #r #a x h -> ubuffer_preserved_refl x h)
(fun #r #a x h1 h2 h3 -> ubuffer_preserved_trans x h1 h2 h3)
(fun #r #a b h1 h2 f -> same_mreference_ubuffer_preserved b h1 h2 f)
let loc = MG.loc cls
let _ = intro_ambient loc
let loc_none = MG.loc_none
let _ = intro_ambient loc_none
let loc_union = MG.loc_union
let _ = intro_ambient loc_union
let loc_union_idem = MG.loc_union_idem
let loc_union_comm = MG.loc_union_comm
let loc_union_assoc = MG.loc_union_assoc
let loc_union_loc_none_l = MG.loc_union_loc_none_l
let loc_union_loc_none_r = MG.loc_union_loc_none_r
let loc_buffer_from_to #a #rrel #rel b from to =
if ubuffer_of_buffer_from_to_none_cond b from to
then MG.loc_none
else
MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to)
let loc_buffer #_ #_ #_ b =
if g_is_null b then MG.loc_none
else MG.loc_of_aloc #_ #_ #(frameOf b) #(as_addr b) (ubuffer_of_buffer b)
let loc_buffer_eq #_ #_ #_ _ = ()
let loc_buffer_from_to_high #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_none #_ #_ #_ _ _ _ = ()
let loc_buffer_from_to_mgsub #_ #_ #_ _ _ _ _ _ _ = ()
let loc_buffer_mgsub_eq #_ #_ #_ _ _ _ _ = ()
let loc_buffer_null _ _ _ = ()
let loc_buffer_from_to_eq #_ #_ #_ _ _ _ = ()
let loc_buffer_mgsub_rel_eq #_ #_ #_ _ _ _ _ _ = ()
let loc_addresses = MG.loc_addresses
let loc_regions = MG.loc_regions | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val loc_includes
(s1 s2: loc)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.loc_includes | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s1: LowStar.Monotonic.Buffer.loc -> s2: LowStar.Monotonic.Buffer.loc -> Prims.GTot Type0 | {
"end_col": 34,
"end_line": 828,
"start_col": 19,
"start_line": 828
} |
Prims.GTot | val modifies_addr_of_preserves_not_unused_in
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Ghost",
"short_module": "G"
},
{
"abbrev": true,
"full_module": "FStar.Preorder",
"short_module": "P"
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Monotonic",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (r: HS.rid) (n: nat) .
((r <> frameOf b \/ n <> as_addr b) /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)) ==>
(n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)) | val modifies_addr_of_preserves_not_unused_in
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0
let modifies_addr_of_preserves_not_unused_in
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 = | false | null | false | forall (r: HS.rid) (n: nat).
((r <> frameOf b \/ n <> as_addr b) /\ HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` ((HS.get_hmap h2) `Map.sel` r)) ==>
(n `Heap.addr_unused_in` ((HS.get_hmap h1) `Map.sel` r)) | {
"checked_file": "LowStar.Monotonic.Buffer.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Set.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Preorder.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.ModifiesGen.fsti.checked",
"FStar.Map.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Heap.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowStar.Monotonic.Buffer.fst"
} | [
"sometrivial"
] | [
"LowStar.Monotonic.Buffer.srel",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_Forall",
"FStar.Monotonic.HyperHeap.rid",
"Prims.nat",
"Prims.l_imp",
"Prims.l_and",
"Prims.l_or",
"Prims.b2t",
"Prims.op_disEquality",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.as_addr",
"FStar.Monotonic.HyperStack.live_region",
"FStar.Monotonic.Heap.addr_unused_in",
"FStar.Map.sel",
"FStar.Monotonic.Heap.heap",
"FStar.Monotonic.HyperStack.get_hmap"
] | [] | (*
Copyright 2008-2018 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module LowStar.Monotonic.Buffer
module P = FStar.Preorder
module G = FStar.Ghost
module U32 = FStar.UInt32
module Seq = FStar.Seq
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
private let srel_to_lsrel (#a:Type0) (len:nat) (pre:srel a) :P.preorder (Seq.lseq a len) = pre
(*
* Counterpart of compatible_sub from the fsti but using sequences
*
* The patterns are guarded tightly, the proof of transitivity gets quite flaky otherwise
* The cost is that we have to additional asserts as triggers
*)
let compatible_sub_preorder (#a:Type0)
(len:nat) (rel:srel a) (i:nat) (j:nat{i <= j /\ j <= len}) (sub_rel:srel a)
= compatible_subseq_preorder len rel i j sub_rel
(*
* Reflexivity of the compatibility relation
*)
let lemma_seq_sub_compatilibity_is_reflexive (#a:Type0) (len:nat) (rel:srel a)
:Lemma (compatible_sub_preorder len rel 0 len rel)
= assert (forall (s1 s2:Seq.seq a). Seq.length s1 == Seq.length s2 ==>
Seq.equal (Seq.replace_subseq s1 0 (Seq.length s1) s2) s2)
(*
* Transitivity of the compatibility relation
*
* i2 and j2 are relative offsets within [i1, j1) (i.e. assuming i1 = 0)
*)
let lemma_seq_sub_compatibility_is_transitive (#a:Type0)
(len:nat) (rel:srel a) (i1 j1:nat) (rel1:srel a) (i2 j2:nat) (rel2:srel a)
:Lemma (requires (i1 <= j1 /\ j1 <= len /\ i2 <= j2 /\ j2 <= j1 - i1 /\
compatible_sub_preorder len rel i1 j1 rel1 /\
compatible_sub_preorder (j1 - i1) rel1 i2 j2 rel2))
(ensures (compatible_sub_preorder len rel (i1 + i2) (i1 + j2) rel2))
= let t1 (s1 s2:Seq.seq a) = Seq.length s1 == len /\ Seq.length s2 == len /\ rel s1 s2 in
let t2 (s1 s2:Seq.seq a) = t1 s1 s2 /\ rel2 (Seq.slice s1 (i1 + i2) (i1 + j2)) (Seq.slice s2 (i1 + i2) (i1 + j2)) in
let aux0 (s1 s2:Seq.seq a) :Lemma (t1 s1 s2 ==> t2 s1 s2)
= Classical.arrow_to_impl #(t1 s1 s2) #(t2 s1 s2)
(fun _ ->
assert (rel1 (Seq.slice s1 i1 j1) (Seq.slice s2 i1 j1));
assert (rel2 (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice (Seq.slice s2 i1 j1) i2 j2));
assert (Seq.equal (Seq.slice (Seq.slice s1 i1 j1) i2 j2) (Seq.slice s1 (i1 + i2) (i1 + j2)));
assert (Seq.equal (Seq.slice (Seq.slice s2 i1 j1) i2 j2) (Seq.slice s2 (i1 + i2) (i1 + j2))))
in
let t1 (s s2:Seq.seq a) = Seq.length s == len /\ Seq.length s2 == j2 - i2 /\
rel2 (Seq.slice s (i1 + i2) (i1 + j2)) s2 in
let t2 (s s2:Seq.seq a) = t1 s s2 /\ rel s (Seq.replace_subseq s (i1 + i2) (i1 + j2) s2) in
let aux1 (s s2:Seq.seq a) :Lemma (t1 s s2 ==> t2 s s2)
= Classical.arrow_to_impl #(t1 s s2) #(t2 s s2)
(fun _ ->
assert (Seq.equal (Seq.slice s (i1 + i2) (i1 + j2)) (Seq.slice (Seq.slice s i1 j1) i2 j2));
assert (rel1 (Seq.slice s i1 j1) (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2));
assert (rel s (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2)));
assert (Seq.equal (Seq.replace_subseq s i1 j1 (Seq.replace_subseq (Seq.slice s i1 j1) i2 j2 s2))
(Seq.replace_subseq s (i1 + i2) (i1 + j2) s2)))
in
Classical.forall_intro_2 aux0; Classical.forall_intro_2 aux1
noeq type mbuffer (a:Type0) (rrel:srel a) (rel:srel a) :Type0 =
| Null
| Buffer:
max_length:U32.t ->
content:HST.mreference (Seq.lseq a (U32.v max_length)) (srel_to_lsrel (U32.v max_length) rrel) ->
idx:U32.t ->
length:Ghost.erased U32.t{U32.v idx + U32.v (Ghost.reveal length) <= U32.v max_length} ->
mbuffer a rrel rel
let g_is_null #_ #_ #_ b = Null? b
let mnull #_ #_ #_ = Null
let null_unique #_ #_ #_ _ = ()
let unused_in #_ #_ #_ b h =
match b with
| Null -> False
| Buffer _ content _ _ -> content `HS.unused_in` h
let buffer_compatible (#t: Type) (#rrel #rel: srel t) (b: mbuffer t rrel rel) : GTot Type0 =
match b with
| Null -> True
| Buffer max_length content idx length ->
compatible_sub_preorder (U32.v max_length) rrel
(U32.v idx) (U32.v idx + U32.v length) rel //proof of compatibility
let live #_ #rrel #rel h b =
match b with
| Null -> True
| Buffer max_length content idx length ->
h `HS.contains` content /\
buffer_compatible b
let live_null _ _ _ _ = ()
let live_not_unused_in #_ #_ #_ _ _ = ()
let lemma_live_equal_mem_domains #_ #_ #_ _ _ _ = ()
let frameOf #_ #_ #_ b = if Null? b then HS.root else HS.frameOf (Buffer?.content b)
let as_addr #_ #_ #_ b = if g_is_null b then 0 else HS.as_addr (Buffer?.content b)
let unused_in_equiv #_ #_ #_ b h =
if g_is_null b then Heap.not_addr_unused_in_nullptr (Map.sel (HS.get_hmap h) HS.root) else ()
let live_region_frameOf #_ #_ #_ _ _ = ()
let len #_ #_ #_ b =
match b with
| Null -> 0ul
| Buffer _ _ _ len -> len
let len_null a _ _ = ()
let as_seq #_ #_ #_ h b =
match b with
| Null -> Seq.empty
| Buffer max_len content idx len ->
Seq.slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len)
let length_as_seq #_ #_ #_ _ _ = ()
let mbuffer_injectivity_in_first_preorder () = ()
let mgsub #a #rrel #rel sub_rel b i len =
match b with
| Null -> Null
| Buffer max_len content idx length ->
Buffer max_len content (U32.add idx i) (Ghost.hide len)
let live_gsub #_ #rrel #rel _ b i len sub_rel =
match b with
| Null -> ()
| Buffer max_len content idx length ->
let prf () : Lemma
(requires (buffer_compatible b))
(ensures (buffer_compatible (mgsub sub_rel b i len)))
=
lemma_seq_sub_compatibility_is_transitive (U32.v max_len) rrel
(U32.v idx) (U32.v idx + U32.v length) rel
(U32.v i) (U32.v i + U32.v len) sub_rel
in
Classical.move_requires prf ()
let gsub_is_null #_ #_ #_ _ _ _ _ = ()
let len_gsub #_ #_ #_ _ _ _ _ = ()
let frameOf_gsub #_ #_ #_ _ _ _ _ = ()
let as_addr_gsub #_ #_ #_ _ _ _ _ = ()
let mgsub_inj #_ #_ #_ _ _ _ _ _ _ _ _ = ()
#push-options "--z3rlimit 20"
let gsub_gsub #_ #_ #rel b i1 len1 sub_rel1 i2 len2 sub_rel2 =
let prf () : Lemma
(requires (compatible_sub b i1 len1 sub_rel1 /\ compatible_sub (mgsub sub_rel1 b i1 len1) i2 len2 sub_rel2))
(ensures (compatible_sub b (U32.add i1 i2) len2 sub_rel2))
=
lemma_seq_sub_compatibility_is_transitive (length b) rel (U32.v i1) (U32.v i1 + U32.v len1) sub_rel1
(U32.v i2) (U32.v i2 + U32.v len2) sub_rel2
in
Classical.move_requires prf ()
#pop-options
/// A buffer ``b`` is equal to its "largest" sub-buffer, at index 0 and
/// length ``len b``.
let gsub_zero_length #_ #_ #rel b = lemma_seq_sub_compatilibity_is_reflexive (length b) rel
let as_seq_gsub #_ #_ #_ h b i len _ =
match b with
| Null -> ()
| Buffer _ content idx len0 ->
Seq.slice_slice (HS.sel h content) (U32.v idx) (U32.v idx + U32.v len0) (U32.v i) (U32.v i + U32.v len)
let lemma_equal_instances_implies_equal_types (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b)
: Lemma (requires s1 === s2)
(ensures a == b)
= Seq.lemma_equal_instances_implies_equal_types ()
let s_lemma_equal_instances_implies_equal_types (_:unit)
: Lemma (forall (a:Type) (b:Type) (s1:Seq.seq a) (s2:Seq.seq b).
{:pattern (has_type s1 (Seq.seq a));
(has_type s2 (Seq.seq b)) }
s1 === s2 ==> a == b)
= Seq.lemma_equal_instances_implies_equal_types()
let live_same_addresses_equal_types_and_preorders'
(#a1 #a2: Type0)
(#rrel1 #rel1: srel a1)
(#rrel2 #rel2: srel a2)
(b1: mbuffer a1 rrel1 rel1)
(b2: mbuffer a2 rrel2 rel2)
(h: HS.mem)
: Lemma
(requires
frameOf b1 == frameOf b2 /\
as_addr b1 == as_addr b2 /\
live h b1 /\
live h b2 /\
(~ (g_is_null b1 /\ g_is_null b2)))
(ensures
a1 == a2 /\
rrel1 == rrel2)
= Heap.lemma_distinct_addrs_distinct_preorders ();
Heap.lemma_distinct_addrs_distinct_mm ();
let s1 : Seq.seq a1 = as_seq h b1 in
assert (Seq.seq a1 == Seq.seq a2);
let s1' : Seq.seq a2 = coerce_eq _ s1 in
assert (s1 === s1');
lemma_equal_instances_implies_equal_types a1 a2 s1 s1'
let live_same_addresses_equal_types_and_preorders
#_ #_ #_ #_ #_ #_ b1 b2 h
= Classical.move_requires (live_same_addresses_equal_types_and_preorders' b1 b2) h
(* Untyped view of buffers, used only to implement the generic modifies clause. DO NOT USE in client code. *)
noeq
type ubuffer_
: Type0
= {
b_max_length: nat;
b_offset: nat;
b_length: nat;
b_is_mm: bool;
}
val ubuffer' (region: HS.rid) (addr: nat) : Tot Type0
let ubuffer' region addr = (x: ubuffer_ { x.b_offset + x.b_length <= x.b_max_length } )
let ubuffer (region: HS.rid) (addr: nat) : Tot Type0 = G.erased (ubuffer' region addr)
let ubuffer_of_buffer' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel)
:Tot (ubuffer (frameOf b) (as_addr b))
= if Null? b
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
Ghost.hide ({
b_max_length = U32.v (Buffer?.max_length b);
b_offset = U32.v (Buffer?.idx b);
b_length = U32.v (Buffer?.length b);
b_is_mm = HS.is_mm (Buffer?.content b);
})
let ubuffer_preserved'
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h h' : HS.mem)
: GTot Type0
= forall (t':Type0) (rrel rel:srel t') (b':mbuffer t' rrel rel) .
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
val ubuffer_preserved (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h h' : HS.mem) : GTot Type0
let ubuffer_preserved = ubuffer_preserved'
let ubuffer_preserved_intro
(#r:HS.rid)
(#a:nat)
(b:ubuffer r a)
(h h' :HS.mem)
(f0: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (frameOf b' == r /\ as_addr b' == a /\ live h b'))
(ensures (live h' b'))
))
(f: (
(t':Type0) ->
(rrel:srel t') -> (rel:srel t') ->
(b':mbuffer t' rrel rel) ->
Lemma
(requires (
frameOf b' == r /\ as_addr b' == a /\
live h b' /\ live h' b' /\
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
))))
(ensures (
Buffer? b' /\ (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len /\
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))
))
: Lemma
(ubuffer_preserved b h h')
= let g'
(t':Type0) (rrel rel:srel t')
(b':mbuffer t' rrel rel)
: Lemma
((frameOf b' == r /\ as_addr b' == a) ==> (
(live h b' ==> live h' b') /\ (
((live h b' /\ live h' b' /\ Buffer? b') ==> (
let ({ b_max_length = bmax; b_offset = boff; b_length = blen }) = Ghost.reveal b in
let Buffer max _ idx len = b' in (
U32.v max == bmax /\
U32.v idx <= boff /\
boff + blen <= U32.v idx + U32.v len
) ==>
Seq.equal (Seq.slice (as_seq h b') (boff - U32.v idx) (boff - U32.v idx + blen)) (Seq.slice (as_seq h' b') (boff - U32.v idx) (boff - U32.v idx + blen))
)))))
= Classical.move_requires (f0 t' rrel rel) b';
Classical.move_requires (f t' rrel rel) b'
in
Classical.forall_intro_4 g'
val ubuffer_preserved_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h : HS.mem) : Lemma
(ubuffer_preserved b h h)
let ubuffer_preserved_refl #r #a b h = ()
val ubuffer_preserved_trans (#r: HS.rid) (#a: nat) (b: ubuffer r a) (h1 h2 h3 : HS.mem) : Lemma
(requires (ubuffer_preserved b h1 h2 /\ ubuffer_preserved b h2 h3))
(ensures (ubuffer_preserved b h1 h3))
let ubuffer_preserved_trans #r #a b h1 h2 h3 = ()
val same_mreference_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
(f: (
(a' : Type) ->
(pre: Preorder.preorder a') ->
(r': HS.mreference a' pre) ->
Lemma
(requires (h1 `HS.contains` r' /\ r == HS.frameOf r' /\ a == HS.as_addr r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
))
: Lemma
(ubuffer_preserved b h1 h2)
let same_mreference_ubuffer_preserved #r #a b h1 h2 f =
ubuffer_preserved_intro b h1 h2
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
(fun t' _ _ b' ->
if Null? b'
then ()
else
f _ _ (Buffer?.content b')
)
val addr_unused_in_ubuffer_preserved
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (HS.live_region h1 r ==> a `Heap.addr_unused_in` (Map.sel (HS.get_hmap h1) r)))
(ensures (ubuffer_preserved b h1 h2))
let addr_unused_in_ubuffer_preserved #r #a b h1 h2 = ()
val ubuffer_of_buffer (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) :Tot (ubuffer (frameOf b) (as_addr b))
let ubuffer_of_buffer #_ #_ #_ b = ubuffer_of_buffer' b
let ubuffer_of_buffer_from_to_none_cond
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot bool
= g_is_null b || U32.v to < U32.v from || U32.v from > length b
let ubuffer_of_buffer_from_to
#a #rrel #rel (b: mbuffer a rrel rel) from to
: GTot (ubuffer (frameOf b) (as_addr b))
= if ubuffer_of_buffer_from_to_none_cond b from to
then
Ghost.hide ({
b_max_length = 0;
b_offset = 0;
b_length = 0;
b_is_mm = false;
})
else
let to' = if U32.v to > length b then length b else U32.v to in
let b1 = ubuffer_of_buffer b in
Ghost.hide ({ Ghost.reveal b1 with b_offset = (Ghost.reveal b1).b_offset + U32.v from; b_length = to' - U32.v from })
val ubuffer_preserved_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h h':HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h' /\ live h b))
(ensures (live h' b /\ as_seq h b == as_seq h' b))
let ubuffer_preserved_elim #_ #_ #_ _ _ _ = ()
val ubuffer_preserved_from_to_elim (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h h' : HS.mem)
:Lemma (requires (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) h h' /\ live h b))
(ensures (live h' b /\ ((U32.v from <= U32.v to /\ U32.v to <= length b) ==> Seq.slice (as_seq h b) (U32.v from) (U32.v to) == Seq.slice (as_seq h' b) (U32.v from) (U32.v to))))
let ubuffer_preserved_from_to_elim #_ #_ #_ _ _ _ _ _ = ()
let unused_in_ubuffer_preserved (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h h':HS.mem)
: Lemma (requires (b `unused_in` h))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'))
= Classical.move_requires (fun b -> live_not_unused_in h b) b;
live_null a rrel rel h;
null_unique b;
unused_in_equiv b h;
addr_unused_in_ubuffer_preserved #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) h h'
let ubuffer_includes' (larger smaller: ubuffer_) : GTot Type0 =
larger.b_is_mm == smaller.b_is_mm /\
larger.b_max_length == smaller.b_max_length /\
larger.b_offset <= smaller.b_offset /\
smaller.b_offset + smaller.b_length <= larger.b_offset + larger.b_length
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_includes0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (larger:ubuffer r1 a1) (smaller:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\ ubuffer_includes' (G.reveal larger) (G.reveal smaller)
val ubuffer_includes (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) : GTot Type0
let ubuffer_includes #r #a larger smaller = ubuffer_includes0 larger smaller
val ubuffer_includes_refl (#r: HS.rid) (#a: nat) (b: ubuffer r a) : Lemma
(b `ubuffer_includes` b)
let ubuffer_includes_refl #r #a b = ()
val ubuffer_includes_trans (#r: HS.rid) (#a: nat) (b1 b2 b3: ubuffer r a) : Lemma
(requires (b1 `ubuffer_includes` b2 /\ b2 `ubuffer_includes` b3))
(ensures (b1 `ubuffer_includes` b3))
let ubuffer_includes_trans #r #a b1 b2 b3 = ()
(*
* TODO: not sure how to make this lemma work with preorders
* it creates a buffer larger' in the proof
* we need a compatible preorder for that
* may be take that as an argument?
*)
(*val ubuffer_includes_ubuffer_preserved (#r: HS.rid) (#a: nat) (larger smaller: ubuffer r a) (h1 h2: HS.mem) : Lemma
(requires (larger `ubuffer_includes` smaller /\ ubuffer_preserved larger h1 h2))
(ensures (ubuffer_preserved smaller h1 h2))
let ubuffer_includes_ubuffer_preserved #r #a larger smaller h1 h2 =
ubuffer_preserved_intro smaller h1 h2 (fun t' b' ->
if Null? b'
then ()
else
let (Buffer max_len content idx' len') = b' in
let idx = U32.uint_to_t (G.reveal larger).b_offset in
let len = U32.uint_to_t (G.reveal larger).b_length in
let larger' = Buffer max_len content idx len in
assert (b' == gsub larger' (U32.sub idx' idx) len');
ubuffer_preserved_elim larger' h1 h2
)*)
let ubuffer_disjoint' (x1 x2: ubuffer_) : GTot Type0 =
if x1.b_length = 0 || x2.b_length = 0
then True
else
(x1.b_max_length == x2.b_max_length /\
(x1.b_offset + x1.b_length <= x2.b_offset \/
x2.b_offset + x2.b_length <= x1.b_offset))
(* TODO: added this because of #606, now that it is fixed, we may not need it anymore *)
let ubuffer_disjoint0 (#r1 #r2:HS.rid) (#a1 #a2:nat) (b1:ubuffer r1 a1) (b2:ubuffer r2 a2) =
r1 == r2 /\ a1 == a2 /\
ubuffer_disjoint' (G.reveal b1) (G.reveal b2)
val ubuffer_disjoint (#r:HS.rid) (#a:nat) (b1 b2:ubuffer r a) :GTot Type0
let ubuffer_disjoint #r #a b1 b2 = ubuffer_disjoint0 b1 b2
val ubuffer_disjoint_sym (#r:HS.rid) (#a: nat) (b1 b2:ubuffer r a)
:Lemma (ubuffer_disjoint b1 b2 <==> ubuffer_disjoint b2 b1)
let ubuffer_disjoint_sym #_ #_ b1 b2 = ()
val ubuffer_disjoint_includes (#r: HS.rid) (#a: nat) (larger1 larger2: ubuffer r a) (smaller1 smaller2: ubuffer r a) : Lemma
(requires (ubuffer_disjoint larger1 larger2 /\ larger1 `ubuffer_includes` smaller1 /\ larger2 `ubuffer_includes` smaller2))
(ensures (ubuffer_disjoint smaller1 smaller2))
let ubuffer_disjoint_includes #r #a larger1 larger2 smaller1 smaller2 = ()
val liveness_preservation_intro (#a:Type0) (#rrel:srel a) (#rel:srel a)
(h h':HS.mem) (b:mbuffer a rrel rel)
(f: (
(t':Type0) ->
(pre: Preorder.preorder t') ->
(r: HS.mreference t' pre) ->
Lemma
(requires (HS.frameOf r == frameOf b /\ HS.as_addr r == as_addr b /\ h `HS.contains` r))
(ensures (h' `HS.contains` r))
))
:Lemma (requires (live h b)) (ensures (live h' b))
let liveness_preservation_intro #_ #_ #_ _ _ b f =
if Null? b
then ()
else f _ _ (Buffer?.content b)
(* Basic, non-compositional modifies clauses, used only to implement the generic modifies clause. DO NOT USE in client code *)
let modifies_0_preserves_mreferences (h1 h2: HS.mem) : GTot Type0 =
forall (a: Type) (pre: Preorder.preorder a) (r: HS.mreference a pre) .
h1 `HS.contains` r ==> (h2 `HS.contains` r /\ HS.sel h1 r == HS.sel h2 r)
let modifies_0_preserves_regions (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) . HS.live_region h1 r ==> HS.live_region h2 r
let modifies_0_preserves_not_unused_in (h1 h2: HS.mem) : GTot Type0 =
forall (r: HS.rid) (n: nat) . (
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
) ==> (
n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)
)
let modifies_0' (h1 h2: HS.mem) : GTot Type0 =
modifies_0_preserves_mreferences h1 h2 /\
modifies_0_preserves_regions h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2
val modifies_0 (h1 h2: HS.mem) : GTot Type0
let modifies_0 = modifies_0'
val modifies_0_live_region (h1 h2: HS.mem) (r: HS.rid) : Lemma
(requires (modifies_0 h1 h2 /\ HS.live_region h1 r))
(ensures (HS.live_region h2 r))
let modifies_0_live_region h1 h2 r = ()
val modifies_0_mreference (#a: Type) (#pre: Preorder.preorder a) (h1 h2: HS.mem) (r: HS.mreference a pre) : Lemma
(requires (modifies_0 h1 h2 /\ h1 `HS.contains` r))
(ensures (h2 `HS.contains` r /\ h1 `HS.sel` r == h2 `HS.sel` r))
let modifies_0_mreference #a #pre h1 h2 r = ()
let modifies_0_ubuffer
(#r: HS.rid)
(#a: nat)
(b: ubuffer r a)
(h1 h2: HS.mem)
: Lemma
(requires (modifies_0 h1 h2))
(ensures (ubuffer_preserved b h1 h2))
= same_mreference_ubuffer_preserved b h1 h2 (fun a' pre r' -> modifies_0_mreference h1 h2 r')
val modifies_0_unused_in
(h1 h2: HS.mem)
(r: HS.rid)
(n: nat)
: Lemma
(requires (
modifies_0 h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)
))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_0_unused_in h1 h2 r n = ()
let modifies_1_preserves_mreferences (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
:GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre).
((frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r') ==>
(h2 `HS.contains` r' /\ HS.sel h1 r' == HS.sel h2 r')
let modifies_1_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_from_to_preserves_ubuffers (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= forall (b':ubuffer (frameOf b) (as_addr b)).
(ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b') ==> ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2
let modifies_1_preserves_livenesses (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= forall (a':Type) (pre:Preorder.preorder a') (r':HS.mreference a' pre). h1 `HS.contains` r' ==> h2 `HS.contains` r'
let modifies_1' (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
: GTot Type0
= modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_preserves_ubuffers b h1 h2
val modifies_1 (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) :GTot Type0
let modifies_1 = modifies_1'
let modifies_1_from_to (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
: GTot Type0
= if ubuffer_of_buffer_from_to_none_cond b from to
then modifies_0 h1 h2
else
modifies_0_preserves_regions h1 h2 /\
modifies_1_preserves_mreferences b h1 h2 /\
modifies_1_preserves_livenesses b h1 h2 /\
modifies_0_preserves_not_unused_in h1 h2 /\
modifies_1_from_to_preserves_ubuffers b from to h1 h2
val modifies_1_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1 b h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
let modifies_1_live_region #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_live_region (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ HS.live_region h1 r)) (ensures (HS.live_region h2 r))
= ()
val modifies_1_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1 b h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
let modifies_1_liveness #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_liveness
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r':HS.mreference a' pre)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\ h1 `HS.contains` r')) (ensures (h2 `HS.contains` r'))
= ()
val modifies_1_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1 b h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
let modifies_1_unused_in #_ #_ #_ _ _ _ _ _ = ()
let modifies_1_from_to_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (r:HS.rid) (n:nat)
:Lemma (requires (modifies_1_from_to b from to h1 h2 /\
HS.live_region h1 r /\ HS.live_region h2 r /\
n `Heap.addr_unused_in` (HS.get_hmap h2 `Map.sel` r)))
(ensures (n `Heap.addr_unused_in` (HS.get_hmap h1 `Map.sel` r)))
= ()
val modifies_1_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1 b h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
let modifies_1_mreference #_ #_ #_ _ _ _ #_ #_ _ = ()
let modifies_1_from_to_mreference
(#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem)
(#a':Type0) (#pre:Preorder.preorder a') (r': HS.mreference a' pre)
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ (frameOf b <> HS.frameOf r' \/ as_addr b <> HS.as_addr r') /\ h1 `HS.contains` r'))
(ensures (h2 `HS.contains` r' /\ h1 `HS.sel` r' == h2 `HS.sel` r'))
= ()
val modifies_1_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1 b h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer b) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
let modifies_1_ubuffer #_ #_ #_ _ _ _ _ = ()
let modifies_1_from_to_ubuffer (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (from to: U32.t) (h1 h2:HS.mem) (b':ubuffer (frameOf b) (as_addr b))
: Lemma (requires (modifies_1_from_to b from to h1 h2 /\ ubuffer_disjoint #(frameOf b) #(as_addr b) (ubuffer_of_buffer_from_to b from to) b'))
(ensures (ubuffer_preserved #(frameOf b) #(as_addr b) b' h1 h2))
= ()
val modifies_1_null (#a:Type0) (#rrel:srel a) (#rel:srel a)
(b:mbuffer a rrel rel) (h1 h2:HS.mem)
: Lemma (requires (modifies_1 b h1 h2 /\ g_is_null b))
(ensures (modifies_0 h1 h2))
let modifies_1_null #_ #_ #_ _ _ _ = ()
let modifies_addr_of_preserves_not_unused_in (#a:Type0) (#rrel:srel a) (#rel:srel a) (b:mbuffer a rrel rel) (h1 h2:HS.mem) | false | false | LowStar.Monotonic.Buffer.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 4,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modifies_addr_of_preserves_not_unused_in
(#a: Type0)
(#rrel #rel: srel a)
(b: mbuffer a rrel rel)
(h1 h2: HS.mem)
: GTot Type0 | [] | LowStar.Monotonic.Buffer.modifies_addr_of_preserves_not_unused_in | {
"file_name": "ulib/LowStar.Monotonic.Buffer.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: LowStar.Monotonic.Buffer.mbuffer a rrel rel ->
h1: FStar.Monotonic.HyperStack.mem ->
h2: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Type0 | {
"end_col": 60,
"end_line": 733,
"start_col": 4,
"start_line": 729
} |