effect (string, 48 classes) | original_source_type (string, 0-23k chars) | opens_and_abbrevs (list, 2-92 items) | isa_cross_project_example (bool, 1 class) | source_definition (string, 9-57.9k chars) | partial_definition (string, 7-23.3k chars) | is_div (bool, 2 classes) | is_type (null) | is_proof (bool, 2 classes) | completed_definiton (string, 1-250k chars) | dependencies (dict) | effect_flags (sequence, 0-2 items) | ideal_premises (sequence, 0-236 items) | mutual_with (sequence, 0-11 items) | file_context (string, 0-407k chars) | interleaved (bool, 1 class) | is_simply_typed (bool, 2 classes) | file_name (string, 5-48 chars) | vconfig (dict) | is_simple_lemma (null) | source_type (string, 10-23k chars) | proof_features (sequence, 0-1 items) | name (string, 8-95 chars) | source (dict) | verbose_type (string, 1-7.42k chars) | source_range (dict) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Prims.Tot | val mods_contains (allowed found: mods_t) : bool | [
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t | val mods_contains (allowed found: mods_t) : bool
let rec mods_contains (allowed found: mods_t) : bool = | false | null | false | match found with
| [] -> true
| h :: t -> mods_contains1 allowed h && mods_contains allowed t | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCode.mod_t",
"Prims.list",
"Prims.op_AmpAmp",
"Vale.PPC64LE.QuickCodes.mods_contains1",
"Vale.PPC64LE.QuickCodes.mods_contains",
"Prims.bool"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mods_contains (allowed found: mods_t) : bool | [
"recursion"
] | Vale.PPC64LE.QuickCodes.mods_contains | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | allowed: Vale.PPC64LE.QuickCode.mods_t -> found: Vale.PPC64LE.QuickCode.mods_t -> Prims.bool | {
"end_col": 63,
"end_line": 47,
"start_col": 2,
"start_line": 45
} |
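A minimal usage sketch for the mods_contains row above (editorial addition, not part of the dataset; it assumes the Vale.PPC64LE.QuickCodes context shown in the row's file_context):
let example_allowed : mods_t = [Mod_None]
// The first match case makes an empty `found` list trivially contained:
let example_empty_ok : bool = mods_contains example_allowed []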
Prims.GTot | val eval_cmp (s: va_state) (c: cmp) : GTot bool | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2 | val eval_cmp (s: va_state) (c: cmp) : GTot bool
let eval_cmp (s: va_state) (c: cmp) : GTot bool = | false | null | false | match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.Machine_s.cmp_opr",
"Prims.op_Equality",
"Vale.PPC64LE.Machine_s.nat64",
"Vale.PPC64LE.Decls.va_eval_cmp_opr",
"Prims.op_disEquality",
"Prims.op_LessThanOrEqual",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Prims.op_GreaterThan",
"Prims.bool"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val eval_cmp (s: va_state) (c: cmp) : GTot bool | [] | Vale.PPC64LE.QuickCodes.eval_cmp | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> c: Vale.PPC64LE.QuickCodes.cmp -> Prims.GTot Prims.bool | {
"end_col": 64,
"end_line": 240,
"start_col": 2,
"start_line": 234
} |
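A small sketch tying the eval_cmp row above to the cmp constructors it matches on (editorial addition; assumes the same module context as the row's file_context):
// eval_cmp is ghost (GTot), so any wrapper that calls it must be ghost as well:
let example_eval_eq (s:va_state) (o1 o2:cmp_opr) : GTot bool = eval_cmp s (Cmp_eq o1 o2)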
Prims.Tot | val valid_cmp (c: cmp) (s: va_state) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1 | val valid_cmp (c: cmp) (s: va_state) : Type0
let valid_cmp (c: cmp) (s: va_state) : Type0 = | false | null | false | match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.cmp_opr",
"Prims.b2t",
"Vale.PPC64LE.Machine_s.valid_first_cmp_opr"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_cmp (c: cmp) (s: va_state) : Type0 | [] | Vale.PPC64LE.QuickCodes.valid_cmp | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.QuickCodes.cmp -> s: Vale.PPC64LE.Decls.va_state -> Type0 | {
"end_col": 41,
"end_line": 230,
"start_col": 2,
"start_line": 224
} |
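A sketch for the valid_cmp row above (editorial addition; assumes the same module context). Note that valid_cmp constrains only the first operand, via valid_first_cmp_opr:
let example_valid_lt (s:va_state) (o1 o2:cmp_opr) : Type0 = valid_cmp (Cmp_lt o1 o2) s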
Prims.Tot | val cmp_to_ocmp (c: cmp) : ocmp | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2 | val cmp_to_ocmp (c: cmp) : ocmp
let cmp_to_ocmp (c: cmp) : ocmp = | false | null | false | match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.Machine_s.cmp_opr",
"Vale.PPC64LE.Decls.va_cmp_eq",
"Vale.PPC64LE.Decls.va_cmp_ne",
"Vale.PPC64LE.Decls.va_cmp_le",
"Vale.PPC64LE.Decls.va_cmp_ge",
"Vale.PPC64LE.Decls.va_cmp_lt",
"Vale.PPC64LE.Decls.va_cmp_gt",
"Vale.PPC64LE.Decls.ocmp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val cmp_to_ocmp (c: cmp) : ocmp | [] | Vale.PPC64LE.QuickCodes.cmp_to_ocmp | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | c: Vale.PPC64LE.QuickCodes.cmp -> Vale.PPC64LE.Decls.ocmp | {
"end_col": 35,
"end_line": 220,
"start_col": 2,
"start_line": 214
} |
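A sketch for the cmp_to_ocmp row above (editorial addition; assumes the same module context). It maps each cmp constructor to the corresponding va_cmp_* builder from Vale.PPC64LE.Decls:
let example_ocmp_ne (o1 o2:cmp_opr) : ocmp = cmp_to_ocmp (Cmp_ne o1 o2)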
Prims.Tot | val va_qIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (IfElse (cmp_to_ocmp b) c1 c2) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods) | val va_qIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (IfElse (cmp_to_ocmp b) c1 c2)
let va_qIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (IfElse (cmp_to_ocmp b) c1 c2) = | false | null | false | QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCodes.cmp",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.QuickCode.QProc",
"Vale.PPC64LE.Machine_s.IfElse",
"Vale.PPC64LE.Decls.ins",
"Vale.PPC64LE.Decls.ocmp",
"Vale.PPC64LE.QuickCodes.cmp_to_ocmp",
"Vale.PPC64LE.QuickCodes.wp_If",
"Vale.PPC64LE.QuickCodes.qIf_proof"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
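// Illustrative unfolding (informal sketch only): for a two-element sequence
// [c1; c2] wrapped as QSeq r1 m1 q1 (QSeq r2 m2 q2 (QEmpty v)), wp reduces roughly to
//   label r1 m1 (mods_contains mods q1.mods /\
//     QProc?.wp q1 s0 (fun s1 _ ->
//       label r2 m2 (mods_contains mods q2.mods /\
//         QProc?.wp q2 s1 (fun s2 _ -> k s2 v))))
// so each call site contributes its own label for error reporting, and each step's
// continuation is built by wp_Seq/wp_Bind rather than by an inline lambda.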
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
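// Informal reading of wp_If (sketch): the first comparison operand must be valid and
// cr0 must be listed in mods; both branch WPs are then evaluated against s1, the state
// in which cr0 already holds eval_cmp_cr0 of the comparison, e.g. for Cmp_eq o1 o2 the
// then-branch WP is assumed under va_eval_cmp_opr s0 o1 = va_eval_cmp_opr s0 o2.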
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qIf
(#a: Type)
(#c1 #c2: code)
(mods: mods_t)
(b: cmp)
(qc1: quickCode a c1)
(qc2: quickCode a c2)
: quickCode a (IfElse (cmp_to_ocmp b) c1 c2) | [] | Vale.PPC64LE.QuickCodes.va_qIf | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
mods: Vale.PPC64LE.QuickCode.mods_t ->
b: Vale.PPC64LE.QuickCodes.cmp ->
qc1: Vale.PPC64LE.QuickCode.quickCode a c1 ->
qc2: Vale.PPC64LE.QuickCode.quickCode a c2
-> Vale.PPC64LE.QuickCode.quickCode a
(Vale.PPC64LE.Machine_s.IfElse (Vale.PPC64LE.QuickCodes.cmp_to_ocmp b) c1 c2) | {
"end_col": 93,
"end_line": 259,
"start_col": 2,
"start_line": 259
} |
Prims.Tot | val va_QLemma
(#a: Type0)
(#cs: codes)
(r: range)
(msg: string)
(pre: Type0)
(post: (squash pre -> Type0))
(l: (unit -> Lemma (requires pre) (ensures post ())))
(qcs: quickCodes a cs)
: quickCodes a cs | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs | val va_QLemma
(#a: Type0)
(#cs: codes)
(r: range)
(msg: string)
(pre: Type0)
(post: (squash pre -> Type0))
(l: (unit -> Lemma (requires pre) (ensures post ())))
(qcs: quickCodes a cs)
: quickCodes a cs
let va_QLemma
(#a: Type0)
(#cs: codes)
(r: range)
(msg: string)
(pre: Type0)
(post: (squash pre -> Type0))
(l: (unit -> Lemma (requires pre) (ensures post ())))
(qcs: quickCodes a cs)
: quickCodes a cs = | false | null | false | QLemma r msg pre post l qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"FStar.Range.range",
"Prims.string",
"Prims.squash",
"Prims.unit",
"Prims.Nil",
"FStar.Pervasives.pattern",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QLemma"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_QLemma
(#a: Type0)
(#cs: codes)
(r: range)
(msg: string)
(pre: Type0)
(post: (squash pre -> Type0))
(l: (unit -> Lemma (requires pre) (ensures post ())))
(qcs: quickCodes a cs)
: quickCodes a cs | [] | Vale.PPC64LE.QuickCodes.va_QLemma | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
pre: Type0 ->
post: (_: Prims.squash pre -> Type0) ->
l: (_: Prims.unit -> FStar.Pervasives.Lemma (requires pre) (ensures post ())) ->
qcs: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a cs | {
"end_col": 240,
"end_line": 74,
"start_col": 213,
"start_line": 74
} |
Prims.Tot | val mods_contains1 (allowed: mods_t) (found: mod_t) : bool | [
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found | val mods_contains1 (allowed: mods_t) (found: mod_t) : bool
let rec mods_contains1 (allowed: mods_t) (found: mod_t) : bool = | false | null | false | match allowed with
| [] -> mod_eq Mod_None found
| h :: t -> mod_eq h found || mods_contains1 t found | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.QuickCode.mod_t",
"Vale.PPC64LE.QuickCode.mod_eq",
"Vale.PPC64LE.QuickCode.Mod_None",
"Prims.list",
"Prims.op_BarBar",
"Vale.PPC64LE.QuickCodes.mods_contains1",
"Prims.bool"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr] | false | true | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mods_contains1 (allowed: mods_t) (found: mod_t) : bool | [
"recursion"
] | Vale.PPC64LE.QuickCodes.mods_contains1 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | allowed: Vale.PPC64LE.QuickCode.mods_t -> found: Vale.PPC64LE.QuickCode.mod_t -> Prims.bool | {
"end_col": 52,
"end_line": 41,
"start_col": 2,
"start_line": 39
} |
Prims.Tot | val va_qPURE
(#cs: codes)
(#pre: ((unit -> GTot Type0) -> GTot Type0){is_monotonic pre})
(#a: Type0)
(r: range)
(msg: string)
($l:
(unit
-> PURE unit
(intro_pure_wp_monotonicity pre;
pre)))
(qcs: quickCodes a cs)
: quickCodes a cs | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs | val va_qPURE
(#cs: codes)
(#pre: ((unit -> GTot Type0) -> GTot Type0){is_monotonic pre})
(#a: Type0)
(r: range)
(msg: string)
($l:
(unit
-> PURE unit
(intro_pure_wp_monotonicity pre;
pre)))
(qcs: quickCodes a cs)
: quickCodes a cs
let va_qPURE
(#cs: codes)
(#pre: ((unit -> GTot Type0) -> GTot Type0){is_monotonic pre})
(#a: Type0)
(r: range)
(msg: string)
($l:
(unit
-> PURE unit
(intro_pure_wp_monotonicity pre;
pre)))
(qcs: quickCodes a cs)
: quickCodes a cs = | false | null | false | QPURE r msg pre l qcs | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"Prims.unit",
"FStar.Monotonic.Pure.is_monotonic",
"FStar.Range.range",
"Prims.string",
"FStar.Monotonic.Pure.intro_pure_wp_monotonicity",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCodes.QPURE"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_qPURE
(#cs: codes)
(#pre: ((unit -> GTot Type0) -> GTot Type0){is_monotonic pre})
(#a: Type0)
(r: range)
(msg: string)
($l:
(unit
-> PURE unit
(intro_pure_wp_monotonicity pre;
pre)))
(qcs: quickCodes a cs)
: quickCodes a cs | [] | Vale.PPC64LE.QuickCodes.va_qPURE | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
r: FStar.Range.range ->
msg: Prims.string ->
$l: (_: Prims.unit -> Prims.PURE Prims.unit) ->
qcs: Vale.PPC64LE.QuickCodes.quickCodes a cs
-> Vale.PPC64LE.QuickCodes.quickCodes a cs | {
"end_col": 23,
"end_line": 82,
"start_col": 2,
"start_line": 82
} |
Prims.Tot | val qblock (#a: Type) (#cs: codes) (mods: mods_t) (qcs: (va_state -> GTot (quickCodes a cs)))
: quickCode a (block cs) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods) | val qblock (#a: Type) (#cs: codes) (mods: mods_t) (qcs: (va_state -> GTot (quickCodes a cs)))
: quickCode a (block cs)
let qblock (#a: Type) (#cs: codes) (mods: mods_t) (qcs: (va_state -> GTot (quickCodes a cs)))
: quickCode a (block cs) = | false | null | false | QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.codes",
"Vale.PPC64LE.QuickCode.mods_t",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCodes.quickCodes",
"Vale.PPC64LE.QuickCode.QProc",
"Vale.PPC64LE.QuickCodes.block",
"Vale.PPC64LE.QuickCodes.wp_block",
"Vale.PPC64LE.QuickCodes.qblock_proof",
"Vale.PPC64LE.QuickCode.quickCode"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val qblock (#a: Type) (#cs: codes) (mods: mods_t) (qcs: (va_state -> GTot (quickCodes a cs)))
: quickCode a (block cs) | [] | Vale.PPC64LE.QuickCodes.qblock | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
mods: Vale.PPC64LE.QuickCode.mods_t ->
qcs: (_: Vale.PPC64LE.Decls.va_state -> Prims.GTot (Vale.PPC64LE.QuickCodes.quickCodes a cs))
-> Vale.PPC64LE.QuickCode.quickCode a (Vale.PPC64LE.QuickCodes.block cs) | {
"end_col": 67,
"end_line": 183,
"start_col": 2,
"start_line": 183
} |
Prims.Tot | val wp_sound_code_pre
(#a: Type0)
(#c: code)
(qc: quickCode a c)
(s0: va_state)
(k: (s0': va_state{s0 == s0'} -> va_state -> a -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let wp_sound_code_pre (#a:Type0) (#c:code) (qc:quickCode a c) (s0:va_state) (k:(s0':va_state{s0 == s0'}) -> va_state -> a -> Type0) : Type0 =
forall
(ok:bool)
(regs:Regs.t)
(vecs:Vecs.t)
(cr0:cr0_t)
(xer:xer_t)
//(mem:vale_full_heap) // splitting mem into its components makes the VCs slightly cleaner:
(mem_layout:vale_heap_layout)
(mem_heap:vale_heap)
(mem_heaplets:vale_heaplets)
(stack:machine_stack)
(stackTaint:memtaint)
.
let mem = {
vf_layout = mem_layout;
vf_heap = mem_heap;
vf_heaplets = mem_heaplets;
} in
let s0' = {
ok = ok;
regs = regs;
vecs = vecs;
cr0 = cr0;
xer = xer;
ms_heap = coerce mem;
ms_stack = stack;
ms_stackTaint = stackTaint
} in
s0 == s0' ==> QProc?.wp qc (state_eta s0') (k (state_eta s0')) | val wp_sound_code_pre
(#a: Type0)
(#c: code)
(qc: quickCode a c)
(s0: va_state)
(k: (s0': va_state{s0 == s0'} -> va_state -> a -> Type0))
: Type0
let wp_sound_code_pre
(#a: Type0)
(#c: code)
(qc: quickCode a c)
(s0: va_state)
(k: (s0': va_state{s0 == s0'} -> va_state -> a -> Type0))
: Type0 = | false | null | false | forall (ok: bool) (regs: Regs.t) (vecs: Vecs.t) (cr0: cr0_t) (xer: xer_t)
(mem_layout: vale_heap_layout) (mem_heap: vale_heap) (mem_heaplets: vale_heaplets)
(stack: machine_stack) (stackTaint: memtaint).
let mem = { vf_layout = mem_layout; vf_heap = mem_heap; vf_heaplets = mem_heaplets } in
let s0' =
{
ok = ok;
regs = regs;
vecs = vecs;
cr0 = cr0;
xer = xer;
ms_heap = coerce mem;
ms_stack = stack;
ms_stackTaint = stackTaint
}
in
s0 == s0' ==> QProc?.wp qc (state_eta s0') (k (state_eta s0')) | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.QuickCodes.code",
"Vale.PPC64LE.QuickCode.quickCode",
"Vale.PPC64LE.Decls.va_state",
"Prims.eq2",
"Prims.l_Forall",
"Prims.bool",
"Vale.PPC64LE.Regs.t",
"Vale.PPC64LE.Vecs.t",
"Vale.PPC64LE.Machine_s.cr0_t",
"Vale.PPC64LE.Machine_s.xer_t",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.Arch.HeapImpl.vale_heaplets",
"Vale.PPC64LE.Machine_s.machine_stack",
"Vale.PPC64LE.Memory.memtaint",
"Prims.l_imp",
"Vale.PPC64LE.Machine_s.state",
"Vale.PPC64LE.QuickCode.__proj__QProc__item__wp",
"Vale.PPC64LE.State.state_eta",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Decls.coerce",
"Vale.Arch.Heap.heap_impl",
"Vale.Arch.HeapImpl.vale_full_heap",
"Vale.Arch.HeapImpl.Mkvale_full_heap"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr]
let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint
val lemma_state_match (s0:va_state) (s1:va_state) : Lemma
(requires state_match s0 s1)
(ensures state_eq s0 s1)
[@va_qattr]
let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1)
=
FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1
[@va_qattr] | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val wp_sound_code_pre
(#a: Type0)
(#c: code)
(qc: quickCode a c)
(s0: va_state)
(k: (s0': va_state{s0 == s0'} -> va_state -> a -> Type0))
: Type0 | [] | Vale.PPC64LE.QuickCodes.wp_sound_code_pre | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
qc: Vale.PPC64LE.QuickCode.quickCode a c ->
s0: Vale.PPC64LE.Decls.va_state ->
k:
(s0': Vale.PPC64LE.Decls.va_state{s0 == s0'} -> _: Vale.PPC64LE.Decls.va_state -> _: a
-> Type0)
-> Type0 | {
"end_col": 66,
"end_line": 403,
"start_col": 2,
"start_line": 375
} |
Prims.Pure | val va_state_match (s0 s1: va_state)
: Pure Type0 (requires True) (ensures fun b -> b ==> state_eq s0 s1) | [
{
"abbrev": false,
"full_module": "FStar.Monotonic.Pure",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Range",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1)
=
FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1 | val va_state_match (s0 s1: va_state)
: Pure Type0 (requires True) (ensures fun b -> b ==> state_eq s0 s1)
let va_state_match (s0 s1: va_state)
: Pure Type0 (requires True) (ensures fun b -> b ==> state_eq s0 s1) = | false | null | false | FStar.Classical.move_requires (lemma_state_match s0) s1;
state_match s0 s1 | {
"checked_file": "Vale.PPC64LE.QuickCodes.fsti.checked",
"dependencies": [
"Vale.PPC64LE.Vecs.fsti.checked",
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Regs.fsti.checked",
"Vale.PPC64LE.QuickCode.fst.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.PPC64LE.Decls.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"prims.fst.checked",
"FStar.Range.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.Pure.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.QuickCodes.fsti"
} | [] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.QuickCodes.state_match",
"Prims.unit",
"FStar.Classical.move_requires",
"Vale.PPC64LE.State.state_eq",
"Vale.PPC64LE.QuickCodes.lemma_state_match",
"Prims.l_True",
"Prims.l_imp"
] | [] | module Vale.PPC64LE.QuickCodes
// Optimized weakest precondition generation for 'quick' procedures
open FStar.Mul
open FStar.Range
open Vale.Def.Prop_s
open Vale.Arch.HeapImpl
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Memory
open Vale.PPC64LE.Stack_i
open Vale.PPC64LE.State
open Vale.PPC64LE.Decls
open Vale.PPC64LE.QuickCode
unfold let code = va_code
unfold let codes = va_codes
unfold let fuel = va_fuel
unfold let eval = eval_code
[@va_qattr "opaque_to_smt"]
let labeled_wrap (r:range) (msg:string) (p:Type0) : GTot Type0 = labeled r msg p
// REVIEW: when used inside a function definition, 'labeled' can show up in an SMT query
// as an uninterpreted function. Make a wrapper around labeled that is interpreted:
[@va_qattr "opaque_to_smt"]
let label (r:range) (msg:string) (p:Type0) : Ghost Type (requires True) (ensures fun q -> q <==> p) =
assert_norm (labeled_wrap r msg p <==> p);
labeled_wrap r msg p
val lemma_label_bool (r:range) (msg:string) (b:bool) : Lemma
(requires label r msg b)
(ensures b)
[SMTPat (label r msg b)]
// wrap "precedes" and LexCons to avoid issues with label (precedes ...)
let precedes_wrap (#a:Type) (x y:a) : GTot Type0 = precedes x y
[@va_qattr]
let rec mods_contains1 (allowed:mods_t) (found:mod_t) : bool =
match allowed with
| [] -> mod_eq Mod_None found
| h::t -> mod_eq h found || mods_contains1 t found
[@va_qattr]
let rec mods_contains (allowed:mods_t) (found:mods_t) : bool =
match found with
| [] -> true
| h::t -> mods_contains1 allowed h && mods_contains allowed t
[@va_qattr]
let if_code (b:bool) (c1:code) (c2:code) : code = if b then c1 else c2
open FStar.Monotonic.Pure
noeq type quickCodes (a:Type0) : codes -> Type =
| QEmpty: a -> quickCodes a []
| QSeq: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> quickCodes a cs -> quickCodes a (c::cs)
| QBind: #b:Type -> #c:code -> #cs:codes -> r:range -> msg:string ->
quickCode b c -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a (c::cs)
| QGetState: #cs:codes -> (va_state -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QPURE: #cs:codes -> r:range -> msg:string -> pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre} ->
(unit -> PURE unit (as_pure_wp pre)) -> quickCodes a cs -> quickCodes a cs
//| QBindPURE: #cs:codes -> b:Type -> r:range -> msg:string -> pre:((b -> GTot Type0) -> GTot Type0) ->
// (unit -> PURE b pre) -> (va_state -> b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QLemma: #cs:codes -> r:range -> msg:string -> pre:Type0 -> post:(squash pre -> Type0) ->
(unit -> Lemma (requires pre) (ensures post ())) -> quickCodes a cs -> quickCodes a cs
| QGhost: #cs:codes -> b:Type -> r:range -> msg:string -> pre:Type0 -> post:(b -> Type0) ->
(unit -> Ghost b (requires pre) (ensures post)) -> (b -> GTot (quickCodes a cs)) -> quickCodes a ((Block [])::cs)
| QAssertBy: #cs:codes -> r:range -> msg:string -> p:Type0 ->
quickCodes unit [] -> quickCodes a cs -> quickCodes a cs
[@va_qattr] unfold let va_QBind (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:va_state -> b -> GTot (quickCodes a cs)) : quickCodes a (c::cs) = QBind r msg qc qcs
[@va_qattr] unfold let va_QEmpty (#a:Type0) (v:a) : quickCodes a [] = QEmpty v
[@va_qattr] unfold let va_QLemma (#a:Type0) (#cs:codes) (r:range) (msg:string) (pre:Type0) (post:(squash pre -> Type0)) (l:unit -> Lemma (requires pre) (ensures post ())) (qcs:quickCodes a cs) : quickCodes a cs = QLemma r msg pre post l qcs
[@va_qattr] unfold let va_QSeq (#a:Type0) (#b:Type) (#c:code) (#cs:codes) (r:range) (msg:string) (qc:quickCode b c) (qcs:quickCodes a cs) : quickCodes a (c::cs) = QSeq r msg qc qcs
[@va_qattr]
let va_qPURE
(#cs:codes) (#pre:((unit -> GTot Type0) -> GTot Type0){is_monotonic pre}) (#a:Type0) (r:range) (msg:string)
($l:unit -> PURE unit (intro_pure_wp_monotonicity pre; pre)) (qcs:quickCodes a cs)
: quickCodes a cs =
QPURE r msg pre l qcs
(* REVIEW: this might be useful, but inference of pre doesn't work as well as for va_qPURE
(need to provide pre explicitly; as a result, no need to put $ on l)
[@va_qattr]
let va_qBindPURE
(#a #b:Type0) (#cs:codes) (pre:(b -> GTot Type0) -> GTot Type0) (r:range) (msg:string)
(l:unit -> PURE b pre) (qcs:va_state -> b -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QBindPURE b r msg pre l qcs
*)
[@va_qattr]
let wp_proc (#a:Type0) (c:code) (qc:quickCode a c) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
match qc with
| QProc _ _ wp _ -> wp s0 k
let wp_Seq_t (a:Type0) = va_state -> a -> Type0
let wp_Bind_t (a:Type0) = va_state -> a -> Type0
let k_AssertBy (p:Type0) (_:va_state) () = p
[@va_qattr]
let va_range1 = mk_range "" 0 0 0 0
val empty_list_is_small (#a:Type) (x:list a) : Lemma
([] #a == x \/ [] #a << x)
[@va_qattr]
let rec wp (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state) :
Tot Type0 (decreases %[cs; 0; qcs])
=
match qcs with
| QEmpty g -> k s0 g
| QSeq r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Seq cs qcs mods k))
| QBind r msg qc qcs ->
let c::cs = cs in
label r msg (mods_contains mods qc.mods /\ wp_proc c qc s0 (wp_Bind cs qcs mods k))
| QGetState f ->
let c::cs = cs in
wp cs (f s0) mods k s0
| QPURE r msg pre l qcs ->
// REVIEW: rather than just applying 'pre' directly to k,
// we define this in a roundabout way so that:
// - it works even if 'pre' isn't known to be monotonic
// - F*'s error reporting uses 'guard_free' to process labels inside (wp cs qcs mods k s0)
(forall (p:unit -> GTot Type0).//{:pattern (pre p)}
(forall (u:unit).{:pattern (guard_free (p u))} wp cs qcs mods k s0 ==> p ())
==>
label r msg (pre p))
(*
| QBindPURE b r msg pre l qcs ->
let c::cs = cs in
(forall (p:b -> GTot Type0).//{:pattern (pre p)}
(forall (g:b).{:pattern (guard_free (p g))} wp cs (qcs s0 g) mods k s0 ==> p g)
==>
label r msg (pre p))
*)
| QLemma r msg pre post l qcs ->
label r msg pre /\ (post () ==> wp cs qcs mods k s0)
| QGhost b r msg pre post l qcs ->
let c::cs = cs in
label r msg pre /\ (forall (g:b). post g ==> wp cs (qcs g) mods k s0)
| QAssertBy r msg p qcsBy qcs ->
empty_list_is_small cs;
wp [] qcsBy mods (k_AssertBy p) s0 /\ (p ==> wp cs qcs mods k s0)
// Hoist lambdas out of main definition to avoid issues with function equality
and wp_Seq (#a:Type0) (#b:Type0) (cs:codes) (qcs:quickCodes b cs) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Seq_t a) (decreases %[cs; 1; qcs])
=
let f s0 _ = wp cs qcs mods k s0 in f
and wp_Bind (#a:Type0) (#b:Type0) (cs:codes) (qcs:va_state -> a -> GTot (quickCodes b cs)) (mods:mods_t) (k:va_state -> b -> Type0) :
Tot (wp_Bind_t a) (decreases %[cs; 1; qcs])
=
let f s0 g = wp cs (qcs s0 g) mods k s0 in f
val wp_sound (#a:Type0) (cs:codes) (qcs:quickCodes a cs) (mods:mods_t) (k:va_state -> a -> Type0) (s0:va_state)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp cs qcs mods k s0)
(ensures fun (sN, fN, gN) ->
eval (Block cs) s0 fN sN /\ update_state_mods mods sN s0 == sN /\ state_inv sN /\ k sN gN
)
///// Block
unfold let block = va_Block
[@va_qattr]
let wp_block (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
wp cs (qcs s0) mods k s0
val qblock_proof (#a:Type) (#cs:codes) (qcs:va_state -> GTot (quickCodes a cs)) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_block qcs mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (block cs) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let qblock (#a:Type) (#cs:codes) (mods:mods_t) (qcs:va_state -> GTot (quickCodes a cs)) : quickCode a (block cs) =
QProc (block cs) mods (wp_block qcs mods) (qblock_proof qcs mods)
///// If, InlineIf
[@va_qattr]
let wp_InlineIf (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
( b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s0 k) /\
(not b ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s0 k)
val qInlineIf_proof (#a:Type) (#c1:code) (#c2:code) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_InlineIf b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (if_code b c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qInlineIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:bool) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (if_code b c1 c2) =
QProc (if_code b c1 c2) mods (wp_InlineIf b qc1 qc2 mods) (qInlineIf_proof b qc1 qc2 mods)
noeq type cmp =
| Cmp_eq : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ne : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_le : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_ge : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_lt : o1:cmp_opr -> o2:cmp_opr -> cmp
| Cmp_gt : o1:cmp_opr -> o2:cmp_opr -> cmp
[@va_qattr]
let cmp_to_ocmp (c:cmp) : ocmp =
match c with
| Cmp_eq o1 o2 -> va_cmp_eq o1 o2
| Cmp_ne o1 o2 -> va_cmp_ne o1 o2
| Cmp_le o1 o2 -> va_cmp_le o1 o2
| Cmp_ge o1 o2 -> va_cmp_ge o1 o2
| Cmp_lt o1 o2 -> va_cmp_lt o1 o2
| Cmp_gt o1 o2 -> va_cmp_gt o1 o2
[@va_qattr]
let valid_cmp (c:cmp) (s:va_state) : Type0 =
match c with
| Cmp_eq o1 _ -> valid_first_cmp_opr o1
| Cmp_ne o1 _ -> valid_first_cmp_opr o1
| Cmp_le o1 _ -> valid_first_cmp_opr o1
| Cmp_ge o1 _ -> valid_first_cmp_opr o1
| Cmp_lt o1 _ -> valid_first_cmp_opr o1
| Cmp_gt o1 _ -> valid_first_cmp_opr o1
[@va_qattr]
let eval_cmp (s:va_state) (c:cmp) : GTot bool =
match c with
| Cmp_eq o1 o2 -> va_eval_cmp_opr s o1 = va_eval_cmp_opr s o2
| Cmp_ne o1 o2 -> va_eval_cmp_opr s o1 <> va_eval_cmp_opr s o2
| Cmp_le o1 o2 -> va_eval_cmp_opr s o1 <= va_eval_cmp_opr s o2
| Cmp_ge o1 o2 -> va_eval_cmp_opr s o1 >= va_eval_cmp_opr s o2
| Cmp_lt o1 o2 -> va_eval_cmp_opr s o1 < va_eval_cmp_opr s o2
| Cmp_gt o1 o2 -> va_eval_cmp_opr s o1 > va_eval_cmp_opr s o2
[@va_qattr]
let wp_If (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0) : Type0 =
// REVIEW: this duplicates k
valid_cmp b s0 /\ mods_contains1 mods Mod_cr0 /\
(let s1 = va_upd_cr0 (eval_cmp_cr0 s0 (cmp_to_ocmp b)) s0 in
( eval_cmp s0 b ==> mods_contains mods qc1.mods /\ QProc?.wp qc1 s1 k) /\
(not (eval_cmp s0 b) ==> mods_contains mods qc2.mods /\ QProc?.wp qc2 s1 k))
val qIf_proof (#a:Type) (#c1:code) (#c2:code) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) (mods:mods_t) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_If b qc1 qc2 mods s0 k)
(ensures fun (sM, f0, g) ->
eval_code (IfElse (cmp_to_ocmp b) c1 c2) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qIf (#a:Type) (#c1:code) (#c2:code) (mods:mods_t) (b:cmp) (qc1:quickCode a c1) (qc2:quickCode a c2) : quickCode a (IfElse (cmp_to_ocmp b) c1 c2) =
QProc (IfElse (cmp_to_ocmp b) c1 c2) mods (wp_If b qc1 qc2 mods) (qIf_proof b qc1 qc2 mods)
///// While
[@va_qattr]
let wp_While_inv
(#a #d:Type) (#c:code) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (s1:va_state) (g1:a) (s2:va_state) (g2:a)
: Type0 =
s2.ok /\ inv s2 g2 /\ mods_contains mods (qc g2).mods /\ dec s2 g2 << dec s1 g1
[@va_qattr]
let wp_While_body
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g1:a) (s1:va_state) (k:va_state -> a -> Type0)
: Type0 =
valid_cmp b s1 /\
(let s1' = va_upd_cr0 (eval_cmp_cr0 s1 (cmp_to_ocmp b)) s1 in
( eval_cmp s1 b ==> mods_contains mods (qc g1).mods /\ QProc?.wp (qc g1) s1' (wp_While_inv qc mods inv dec s1 g1)) /\
(not (eval_cmp s1 b) ==> k s1' g1))
[@va_qattr]
let wp_While
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Type0 =
inv s0 g0 /\ mods_contains mods (qc g0).mods /\ mods_contains1 mods Mod_cr0 /\
// REVIEW: we could get a better WP with forall (...state components...) instead of forall (s1:va_state)
(forall (s1:va_state) (g1:a). inv s1 g1 ==> wp_While_body b qc mods inv dec g1 s1 k)
val qWhile_proof
(#a #d:Type) (#c:code) (b:cmp) (qc:a -> quickCode a c) (mods:mods_t) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a) (s0:va_state) (k:va_state -> a -> Type0)
: Ghost (va_state & va_fuel & a)
(requires t_require s0 /\ wp_While b qc mods inv dec g0 s0 k)
(ensures fun (sM, f0, g) ->
eval_code (While (cmp_to_ocmp b) c) s0 f0 sM /\ update_state_mods mods sM s0 == sM /\ state_inv sM /\ k sM g
)
[@"opaque_to_smt" va_qattr]
let va_qWhile
(#a #d:Type) (#c:code) (mods:mods_t) (b:cmp) (qc:a -> quickCode a c) (inv:va_state -> a -> Type0)
(dec:va_state -> a -> d) (g0:a)
: quickCode a (While (cmp_to_ocmp b) c) =
QProc (While (cmp_to_ocmp b) c) mods (wp_While b qc mods inv dec g0)
(qWhile_proof b qc mods inv dec g0)
///// Assert, Assume, AssertBy
let tAssertLemma (p:Type0) = unit -> Lemma (requires p) (ensures p)
val qAssertLemma (p:Type0) : tAssertLemma p
[@va_qattr]
let va_qAssert (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg e (fun () -> e) (qAssertLemma e) qcs
let tAssumeLemma (p:Type0) = unit -> Lemma (requires True) (ensures p)
val qAssumeLemma (p:Type0) : tAssumeLemma p
[@va_qattr]
let va_qAssume (#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:quickCodes a cs) : quickCodes a cs =
QLemma r msg True (fun () -> e) (qAssumeLemma e) qcs
let tAssertSquashLemma (p:Type0) = unit -> Ghost (squash p) (requires p) (ensures fun () -> p)
val qAssertSquashLemma (p:Type0) : tAssertSquashLemma p
[@va_qattr]
let va_qAssertSquash
(#a:Type) (#cs:codes) (r:range) (msg:string) (e:Type0) (qcs:squash e -> GTot (quickCodes a cs))
: quickCodes a ((Block [])::cs) =
QGhost (squash e) r msg e (fun () -> e) (qAssertSquashLemma e) qcs
//let tAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) =
// unit -> Lemma (requires t_require s0 /\ wp [] qcs mods (fun _ _ -> p) s0) (ensures p)
//val qAssertByLemma (#a:Type) (p:Type0) (qcs:quickCodes a []) (mods:mods_t) (s0:state) : tAssertByLemma p qcs mods s0
//
//[@va_qattr]
//let va_qAssertBy (#a:Type) (#cs:codes) (mods:mods_t) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (s0:state) (qcsTail:quickCodes a cs) : quickCodes a cs =
// QLemma r msg (t_require s0 /\ wp [] qcsBy mods (fun _ _ -> p) s0) (fun () -> p) (qAssertByLemma p qcsBy mods s0) qcsTail
[@va_qattr]
let va_qAssertBy (#a:Type) (#cs:codes) (r:range) (msg:string) (p:Type0) (qcsBy:quickCodes unit []) (qcsTail:quickCodes a cs) : quickCodes a cs =
QAssertBy r msg p qcsBy qcsTail
///// Code
val wp_sound_code (#a:Type0) (c:code) (qc:quickCode a c) (k:va_state -> a -> Type0) (s0:va_state) :
Ghost (va_state & fuel & a)
(requires t_require s0 /\ QProc?.wp qc s0 k)
(ensures fun (sN, fN, gN) -> eval_code c s0 fN sN /\ update_state_mods qc.mods sN s0 == sN /\ state_inv sN /\ k sN gN)
[@va_qattr]
let state_match (s0:va_state) (s1:va_state) : Type0 =
s0.ok == s1.ok /\
Regs.equal s0.regs s1.regs /\
Vecs.equal s0.vecs s1.vecs /\
s0.cr0 == s1.cr0 /\
s0.xer == s1.xer /\
s0.ms_heap == s1.ms_heap /\
s0.ms_stack == s1.ms_stack /\
s0.ms_stackTaint == s1.ms_stackTaint
val lemma_state_match (s0:va_state) (s1:va_state) : Lemma
(requires state_match s0 s1)
(ensures state_eq s0 s1)
[@va_qattr]
let va_state_match (s0:va_state) (s1:va_state) : Pure Type0
(requires True)
(ensures fun b -> b ==> state_eq s0 s1) | false | false | Vale.PPC64LE.QuickCodes.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_state_match (s0 s1: va_state)
: Pure Type0 (requires True) (ensures fun b -> b ==> state_eq s0 s1) | [] | Vale.PPC64LE.QuickCodes.va_state_match | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.QuickCodes.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s0: Vale.PPC64LE.Decls.va_state -> s1: Vale.PPC64LE.Decls.va_state -> Prims.Pure Type0 | {
"end_col": 19,
"end_line": 371,
"start_col": 2,
"start_line": 370
} |
FStar.Pervasives.Lemma | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def | let compute_iv_BE_reveal = | false | null | true | opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [
"lemma"
] | [
"Vale.Def.Opaque_s.opaque_revealer",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"Vale.AES.GCM_BE_s.compute_iv_BE",
"Vale.AES.GCM_BE_s.compute_iv_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
) | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compute_iv_BE_reveal : _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.compute_iv_BE == Vale.AES.GCM_BE_s.compute_iv_BE_def) | [] | Vale.AES.GCM_BE_s.compute_iv_BE_reveal | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.compute_iv_BE == Vale.AES.GCM_BE_s.compute_iv_BE_def) | {
"end_col": 104,
"end_line": 34,
"start_col": 39,
"start_line": 34
} |
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_encrypt_BE = opaque_make gcm_encrypt_BE_def | let gcm_encrypt_BE = | false | null | false | opaque_make gcm_encrypt_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [] | [
"Vale.Def.Opaque_s.opaque_make",
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True",
"Vale.AES.GCM_BE_s.gcm_encrypt_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8* length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_encrypt_BE : alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure
(FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | [] | Vale.AES.GCM_BE_s.gcm_encrypt_BE | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure
(FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | {
"end_col": 70,
"end_line": 62,
"start_col": 40,
"start_line": 62
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compute_iv_BE = opaque_make compute_iv_BE_def | let compute_iv_BE = | false | null | false | opaque_make compute_iv_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [
"total"
] | [
"Vale.Def.Opaque_s.opaque_make",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"Vale.AES.GCM_BE_s.compute_iv_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE | false | true | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compute_iv_BE : _: Vale.Def.Types_s.quad32 -> _: Vale.AES.GCM_BE_s.supported_iv_BE -> Vale.Def.Types_s.quad32 | [] | Vale.AES.GCM_BE_s.compute_iv_BE | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Vale.Def.Types_s.quad32 -> _: Vale.AES.GCM_BE_s.supported_iv_BE -> Vale.Def.Types_s.quad32 | {
"end_col": 68,
"end_line": 33,
"start_col": 39,
"start_line": 33
} |
|
FStar.Pervasives.Lemma | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_encrypt_BE_reveal = opaque_revealer (`%gcm_encrypt_BE) gcm_encrypt_BE gcm_encrypt_BE_def | let gcm_encrypt_BE_reveal = | false | null | true | opaque_revealer (`%gcm_encrypt_BE) gcm_encrypt_BE gcm_encrypt_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [
"lemma"
] | [
"Vale.Def.Opaque_s.opaque_revealer",
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True",
"Vale.AES.GCM_BE_s.gcm_encrypt_BE",
"Vale.AES.GCM_BE_s.gcm_encrypt_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8 * length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t) | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_encrypt_BE_reveal : _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.gcm_encrypt_BE == Vale.AES.GCM_BE_s.gcm_encrypt_BE_def) | [] | Vale.AES.GCM_BE_s.gcm_encrypt_BE_reveal | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.gcm_encrypt_BE == Vale.AES.GCM_BE_s.gcm_encrypt_BE_def) | {
"end_col": 108,
"end_line": 63,
"start_col": 40,
"start_line": 63
} |
|
FStar.Pervasives.Lemma | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_decrypt_BE_reveal = opaque_revealer (`%gcm_decrypt_BE) gcm_decrypt_BE gcm_decrypt_BE_def | let gcm_decrypt_BE_reveal = | false | null | true | opaque_revealer (`%gcm_decrypt_BE) gcm_decrypt_BE gcm_decrypt_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [
"lemma"
] | [
"Vale.Def.Opaque_s.opaque_revealer",
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.bool",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True",
"Vale.AES.GCM_BE_s.gcm_decrypt_BE",
"Vale.AES.GCM_BE_s.gcm_decrypt_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8* length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t)
[@"opaque_to_smt"] let gcm_encrypt_BE = opaque_make gcm_encrypt_BE_def
irreducible let gcm_encrypt_BE_reveal = opaque_revealer (`%gcm_encrypt_BE) gcm_encrypt_BE gcm_encrypt_BE_def
let gcm_decrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (cipher:seq nat8) (auth:seq nat8) (tag:seq nat8) :
Pure (seq nat8 & bool)
(requires
is_aes_key alg key /\
length cipher < pow2_32 /\
length auth < pow2_32
)
(ensures fun (p, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let p = gctr_encrypt (inc32 j0_BE 1) cipher alg key_BE in // TODO: Rename gctr_encrypt to gctr
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length cipher))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(p, t = tag) | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_decrypt_BE_reveal : _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.gcm_decrypt_BE == Vale.AES.GCM_BE_s.gcm_decrypt_BE_def) | [] | Vale.AES.GCM_BE_s.gcm_decrypt_BE_reveal | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> FStar.Pervasives.Lemma
(ensures Vale.AES.GCM_BE_s.gcm_decrypt_BE == Vale.AES.GCM_BE_s.gcm_decrypt_BE_def) | {
"end_col": 108,
"end_line": 91,
"start_col": 40,
"start_line": 91
} |
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_decrypt_BE = opaque_make gcm_decrypt_BE_def | let gcm_decrypt_BE = | false | null | false | opaque_make gcm_decrypt_BE_def | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [] | [
"Vale.Def.Opaque_s.opaque_make",
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.bool",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Seq.Base.length",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True",
"Vale.AES.GCM_BE_s.gcm_decrypt_BE_def"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8 * length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t)
[@"opaque_to_smt"] let gcm_encrypt_BE = opaque_make gcm_encrypt_BE_def
irreducible let gcm_encrypt_BE_reveal = opaque_revealer (`%gcm_encrypt_BE) gcm_encrypt_BE gcm_encrypt_BE_def
let gcm_decrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (cipher:seq nat8) (auth:seq nat8) (tag:seq nat8) :
Pure (seq nat8 & bool)
(requires
is_aes_key alg key /\
length cipher < pow2_32 /\
length auth < pow2_32
)
(ensures fun (p, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let p = gctr_encrypt (inc32 j0_BE 1) cipher alg key_BE in // TODO: Rename gctr_encrypt to gctr
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length cipher))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_decrypt_BE : alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
cipher: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
tag: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * Prims.bool) | [] | Vale.AES.GCM_BE_s.gcm_decrypt_BE | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
cipher: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
tag: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * Prims.bool) | {
"end_col": 70,
"end_line": 90,
"start_col": 40,
"start_line": 90
} |
|
Prims.Tot | val compute_iv_BE_def (h_BE: quad32) (iv: supported_iv_BE) : quad32 | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
) | val compute_iv_BE_def (h_BE: quad32) (iv: supported_iv_BE) : quad32
let compute_iv_BE_def (h_BE: quad32) (iv: supported_iv_BE) : quad32 = | false | null | false | if 8 * (length iv) = 96
then
(let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE)
else
(let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE) | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [
"total"
] | [
"Vale.Def.Types_s.quad32",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"Prims.op_Equality",
"Prims.int",
"FStar.Mul.op_Star",
"FStar.Seq.Base.length",
"Vale.Def.Types_s.nat8",
"Vale.Def.Words_s.four",
"Vale.Def.Words_s.nat32",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.__proj__Mkfour__item__lo1",
"Vale.Def.Words_s.__proj__Mkfour__item__hi2",
"Vale.Def.Words_s.__proj__Mkfour__item__hi3",
"Vale.Def.Types_s.be_bytes_to_quad32",
"Vale.AES.GCTR_BE_s.pad_to_128_bits",
"Prims.bool",
"Vale.AES.GHash_BE_s.ghash_BE",
"FStar.Seq.Base.seq",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.Def.Words_s.natN",
"Prims.pow2",
"Vale.Def.Words.Four_s.two_two_to_four",
"Vale.Def.Words_s.Mktwo",
"Vale.Def.Words_s.two",
"Vale.Def.Words.Two_s.nat_to_two",
"Vale.Def.Types_s.be_bytes_to_seq_quad32"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32 | false | true | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val compute_iv_BE_def (h_BE: quad32) (iv: supported_iv_BE) : quad32 | [] | Vale.AES.GCM_BE_s.compute_iv_BE_def | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h_BE: Vale.Def.Types_s.quad32 -> iv: Vale.AES.GCM_BE_s.supported_iv_BE -> Vale.Def.Types_s.quad32 | {
"end_col": 3,
"end_line": 32,
"start_col": 2,
"start_line": 22
} |
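compute_iv_BE_def above derives the pre-counter block: a 96-bit IV is used directly with a 32-bit counter of 1, and any other length is hashed (GHASH over the padded IV and a length block). A minimal Python sketch of the 96-bit branch only, assuming the standard GCM byte-level rule J0 = IV || 0^31 || 1; the function name is hypothetical.

```python
def j0_from_96bit_iv(iv: bytes) -> bytes:
    # 96-bit IV: the pre-counter block J0 is the IV followed by a 32-bit counter set to 1.
    assert len(iv) == 12
    return iv + (1).to_bytes(4, "big")

j0 = j0_from_96bit_iv(bytes.fromhex("cafebabefacedbaddecaf888"))
assert j0.hex() == "cafebabefacedbaddecaf88800000001"
```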
Prims.Pure | val gcm_encrypt_BE_def (alg: algorithm) (key: seq nat8) (iv: supported_iv_BE) (plain auth: seq nat8)
: Pure (seq nat8 & seq nat8)
(requires is_aes_key alg key /\ length plain < pow2_32 /\ length auth < pow2_32)
(ensures fun (c, t) -> True) | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8 * length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t) | val gcm_encrypt_BE_def (alg: algorithm) (key: seq nat8) (iv: supported_iv_BE) (plain auth: seq nat8)
: Pure (seq nat8 & seq nat8)
(requires is_aes_key alg key /\ length plain < pow2_32 /\ length auth < pow2_32)
(ensures fun (c, t) -> True)
let gcm_encrypt_BE_def (alg: algorithm) (key: seq nat8) (iv: supported_iv_BE) (plain auth: seq nat8)
: Pure (seq nat8 & seq nat8)
(requires is_aes_key alg key /\ length plain < pow2_32 /\ length auth < pow2_32)
(ensures fun (c, t) -> True) = | false | null | false | let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
let lengths_BE =
two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain)))
in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t) | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.Mktuple2",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCTR_BE_s.gctr_encrypt",
"Vale.Arch.Types.be_quad32_to_bytes",
"Vale.Def.Types_s.quad32",
"Vale.AES.GHash_BE_s.ghash_BE",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.Def.Types_s.be_bytes_to_seq_quad32",
"Vale.AES.GCTR_BE_s.pad_to_128_bits",
"Vale.Def.Words_s.four",
"Vale.Def.Words_s.natN",
"Prims.pow2",
"Vale.Def.Words.Four_s.two_two_to_four",
"Vale.Def.Words_s.Mktwo",
"Vale.Def.Words_s.two",
"Vale.Def.Words.Two_s.nat_to_two",
"FStar.Mul.op_Star",
"FStar.Seq.Base.length",
"Vale.AES.GCTR_BE_s.inc32",
"Vale.AES.GCM_BE_s.compute_iv_BE",
"Vale.AES.AES_BE_s.aes_encrypt_word",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.nat32",
"Vale.Def.Words.Seq_s.seq_nat8_to_seq_nat32_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
) | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_encrypt_BE_def (alg: algorithm) (key: seq nat8) (iv: supported_iv_BE) (plain auth: seq nat8)
: Pure (seq nat8 & seq nat8)
(requires is_aes_key alg key /\ length plain < pow2_32 /\ length auth < pow2_32)
(ensures fun (c, t) -> True) | [] | Vale.AES.GCM_BE_s.gcm_encrypt_BE_def | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
plain: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure
(FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * FStar.Seq.Base.seq Vale.Def.Types_s.nat8) | {
"end_col": 8,
"end_line": 61,
"start_col": 3,
"start_line": 44
} |
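gcm_encrypt_BE_def CTR-encrypts the plaintext starting from inc32 j0_BE 1 and keeps j0_BE itself for the tag computation. A minimal Python sketch of a 32-bit counter increment, assuming the usual GCM reading in which only the last 32 bits of the serialized counter block change; how those bits map onto the quad32 words in the Vale BE representation is not shown here.

```python
def inc32(block16: bytes, i: int = 1) -> bytes:
    # Add i to the low 32 bits of the counter block, modulo 2**32; the rest is untouched.
    assert len(block16) == 16
    ctr = (int.from_bytes(block16[12:], "big") + i) % (1 << 32)
    return block16[:12] + ctr.to_bytes(4, "big")

j0 = bytes(12) + (0xFFFFFFFF).to_bytes(4, "big")
assert inc32(j0)[12:] == bytes(4)      # the 32-bit counter wraps around
assert inc32(j0)[:12] == bytes(12)     # the other 96 bits are unchanged
```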
Prims.Pure | val gcm_decrypt_BE_def
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_BE)
(cipher auth tag: seq nat8)
: Pure (seq nat8 & bool)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun (p, t) -> True) | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_BE_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Two_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gcm_decrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (cipher:seq nat8) (auth:seq nat8) (tag:seq nat8) :
Pure (seq nat8 & bool)
(requires
is_aes_key alg key /\
length cipher < pow2_32 /\
length auth < pow2_32
)
(ensures fun (p, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let p = gctr_encrypt (inc32 j0_BE 1) cipher alg key_BE in // TODO: Rename gctr_encrypt to gctr
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length cipher))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(p, t = tag) | val gcm_decrypt_BE_def
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_BE)
(cipher auth tag: seq nat8)
: Pure (seq nat8 & bool)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun (p, t) -> True)
let gcm_decrypt_BE_def
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_BE)
(cipher auth tag: seq nat8)
: Pure (seq nat8 & bool)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun (p, t) -> True) = | false | null | false | let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let p = gctr_encrypt (inc32 j0_BE 1) cipher alg key_BE in
let lengths_BE =
two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length cipher)))
in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits cipher) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(p, t = tag) | {
"checked_file": "Vale.AES.GCM_BE_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Two_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GHash_BE_s.fst.checked",
"Vale.AES.GCTR_BE_s.fst.checked",
"Vale.AES.AES_BE_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_BE_s.fst"
} | [] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.nat8",
"Vale.AES.GCM_BE_s.supported_iv_BE",
"FStar.Pervasives.Native.Mktuple2",
"Prims.bool",
"Prims.op_Equality",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCTR_BE_s.gctr_encrypt",
"Vale.Arch.Types.be_quad32_to_bytes",
"Vale.Def.Types_s.quad32",
"Vale.AES.GHash_BE_s.ghash_BE",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.Def.Types_s.be_bytes_to_seq_quad32",
"Vale.AES.GCTR_BE_s.pad_to_128_bits",
"Vale.Def.Words_s.four",
"Vale.Def.Words_s.natN",
"Prims.pow2",
"Vale.Def.Words.Four_s.two_two_to_four",
"Vale.Def.Words_s.Mktwo",
"Vale.Def.Words_s.two",
"Vale.Def.Words.Two_s.nat_to_two",
"FStar.Mul.op_Star",
"FStar.Seq.Base.length",
"Vale.AES.GCTR_BE_s.inc32",
"Vale.AES.GCM_BE_s.compute_iv_BE",
"Vale.AES.AES_BE_s.aes_encrypt_word",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.Def.Words_s.nat32",
"Vale.Def.Words.Seq_s.seq_nat8_to_seq_nat32_BE",
"FStar.Pervasives.Native.tuple2",
"Prims.l_and",
"Vale.AES.AES_common_s.is_aes_key",
"Prims.b2t",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_32",
"Prims.l_True"
] | [] | module Vale.AES.GCM_BE_s
open Vale.Arch.Types
open Vale.Def.Opaque_s
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Two_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.AES.AES_BE_s
open Vale.AES.GCTR_BE_s
open Vale.AES.GHash_BE_s
open FStar.Seq
open FStar.Mul
#reset-options "--z3rlimit 30"
type supported_iv_BE:eqtype = iv:seq nat8 { 1 <= 8 * (length iv) /\ 8 * (length iv) < pow2_64 }
let compute_iv_BE_def (h_BE:quad32) (iv:supported_iv_BE) : quad32
=
if 8 * (length iv) = 96 then (
let iv_BE = be_bytes_to_quad32 (pad_to_128_bits iv) in
let j0_BE = Mkfour 1 iv_BE.lo1 iv_BE.hi2 iv_BE.hi3 in
j0_BE
) else (
let padded_iv_quads = be_bytes_to_seq_quad32 (pad_to_128_bits iv) in
let length_BE = two_two_to_four (Mktwo (nat_to_two 32 0) (nat_to_two 32 (8 * length iv))) in
let hash_input_BE = append padded_iv_quads (create 1 length_BE) in
let hash_output_BE = ghash_BE h_BE hash_input_BE in
hash_output_BE
)
[@"opaque_to_smt"] let compute_iv_BE = opaque_make compute_iv_BE_def
irreducible let compute_iv_BE_reveal = opaque_revealer (`%compute_iv_BE) compute_iv_BE compute_iv_BE_def
let gcm_encrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (plain:seq nat8) (auth:seq nat8) :
Pure (seq nat8 & seq nat8)
(requires
is_aes_key alg key /\
length plain < pow2_32 /\
length auth < pow2_32
)
(ensures fun (c, t) -> True)
=
let key_BE = seq_nat8_to_seq_nat32_BE key in
let h_BE = aes_encrypt_word alg key_BE (Mkfour 0 0 0 0) in
let j0_BE = compute_iv_BE h_BE iv in
let c = gctr_encrypt (inc32 j0_BE 1) plain alg key_BE in
// Sets the first 64-bit number to 8 * length plain, and the second to 8 * length auth
let lengths_BE = two_two_to_four (Mktwo (nat_to_two 32 (8 * length auth)) (nat_to_two 32 (8 * length plain))) in
let zero_padded_c_BE = be_bytes_to_seq_quad32 (pad_to_128_bits c) in
let zero_padded_a_BE = be_bytes_to_seq_quad32 (pad_to_128_bits auth) in
let hash_input_BE = append zero_padded_a_BE (append zero_padded_c_BE (create 1 lengths_BE)) in
let s_BE = ghash_BE h_BE hash_input_BE in
let t = gctr_encrypt j0_BE (be_quad32_to_bytes s_BE) alg key_BE in
(c, t)
[@"opaque_to_smt"] let gcm_encrypt_BE = opaque_make gcm_encrypt_BE_def
irreducible let gcm_encrypt_BE_reveal = opaque_revealer (`%gcm_encrypt_BE) gcm_encrypt_BE gcm_encrypt_BE_def
let gcm_decrypt_BE_def (alg:algorithm) (key:seq nat8) (iv:supported_iv_BE) (cipher:seq nat8) (auth:seq nat8) (tag:seq nat8) :
Pure (seq nat8 & bool)
(requires
is_aes_key alg key /\
length cipher < pow2_32 /\
length auth < pow2_32
) | false | false | Vale.AES.GCM_BE_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 30,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gcm_decrypt_BE_def
(alg: algorithm)
(key: seq nat8)
(iv: supported_iv_BE)
(cipher auth tag: seq nat8)
: Pure (seq nat8 & bool)
(requires is_aes_key alg key /\ length cipher < pow2_32 /\ length auth < pow2_32)
(ensures fun (p, t) -> True) | [] | Vale.AES.GCM_BE_s.gcm_decrypt_BE_def | {
"file_name": "vale/specs/crypto/Vale.AES.GCM_BE_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
iv: Vale.AES.GCM_BE_s.supported_iv_BE ->
cipher: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
auth: FStar.Seq.Base.seq Vale.Def.Types_s.nat8 ->
tag: FStar.Seq.Base.seq Vale.Def.Types_s.nat8
-> Prims.Pure (FStar.Seq.Base.seq Vale.Def.Types_s.nat8 * Prims.bool) | {
"end_col": 14,
"end_line": 89,
"start_col": 3,
"start_line": 73
} |
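gcm_decrypt_BE_def reuses gctr_encrypt on the ciphertext and returns the recomputed tag comparison as a boolean; this works because CTR mode XORs a keystream into the data and is therefore its own inverse. A minimal Python sketch of that involution with a stand-in keystream (purely illustrative, not AES and not the Vale code).

```python
def toy_keystream(n: int) -> bytes:
    # Stand-in keystream; the spec derives the real one from AES over successive counter blocks.
    return bytes((37 * i + 11) % 256 for i in range(n))

def ctr_xor(data: bytes) -> bytes:
    return bytes(d ^ k for d, k in zip(data, toy_keystream(len(data))))

msg = b"spec-level round trip"
assert ctr_xor(ctr_xor(msg)) == msg    # the same keystream applied twice recovers the input
```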
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let seqperm (#a:Type) (s0:seq a) (s1:seq a) =
f:index_fun s0 { is_permutation s0 s1 f } | let seqperm (#a: Type) (s0 s1: seq a) = | false | null | false | f: index_fun s0 {is_permutation s0 s1 f} | {
"checked_file": "FStar.Seq.Permutation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Permutation.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"FStar.Seq.Permutation.index_fun",
"FStar.Seq.Permutation.is_permutation"
] | [] | (*
Copyright 2021-2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: N. Swamy, A. Rastogi, A. Rozanov
*)
module FStar.Seq.Permutation
open FStar.Seq
open FStar.IntegerIntervals
(* This module defines a permutation on sequences as a bijection among
the sequence indices relating equal elements.
It defines a few utilities to work with such permutations.
Notably:
  1. Given two sequences with equal element counts, it constructs a
permutation.
2. Folding the multiplication of a commutative monoid over a
sequence and its permutation produces the equivalent results
*)
(* A function from the indices of `s` to itself *)
let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s)
(* An abstract predicate defining when an index_fun is a permutation *)
val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop
(* Revealing the interpretation of is_permutation *)
val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0)
: Lemma (is_permutation s0 s1 f <==>
(* lengths of the sequences are the same *)
Seq.length s0 == Seq.length s1 /\
(* f is injective *)
(forall x y. {:pattern f x; f y}
x <> y ==> f x <> f y) /\
(* and f relates equal items in s0 and s1 *)
(forall (i:nat{i < Seq.length s0}).{:pattern (Seq.index s1 (f i))}
Seq.index s0 i == Seq.index s1 (f i)))
(* A seqperm is an index_fun that is also a permutation *) | false | false | FStar.Seq.Permutation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val seqperm : s0: FStar.Seq.Base.seq a -> s1: FStar.Seq.Base.seq a -> Type0 | [] | FStar.Seq.Permutation.seqperm | {
"file_name": "ulib/FStar.Seq.Permutation.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s0: FStar.Seq.Base.seq a -> s1: FStar.Seq.Base.seq a -> Type0 | {
"end_col": 43,
"end_line": 58,
"start_col": 2,
"start_line": 58
} |
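The seqperm row above pairs an index function with the is_permutation property that reveal_is_permutation spells out: equal lengths, injectivity of f, and s0[i] == s1[f i]. A minimal Python sketch of that check on lists, as an executable reading of the property rather than the F* definition itself.

```python
def is_permutation(s0, s1, f) -> bool:
    n = len(s0)
    images = [f(i) for i in range(n)]
    return (len(s1) == n
            and all(0 <= j < n for j in images)
            and len(set(images)) == n                      # injective on the indices
            and all(s0[i] == s1[images[i]] for i in range(n)))

s0, s1 = ["a", "b", "c"], ["c", "a", "b"]
assert is_permutation(s0, s1, lambda i: (i + 1) % 3)
assert not is_permutation(s0, s1, lambda i: 0)             # not injective
```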
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s) | let index_fun #a (s: seq a) = | false | null | false | under (Seq.length s) -> under (Seq.length s) | {
"checked_file": "FStar.Seq.Permutation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Permutation.fsti"
} | [
"total"
] | [
"FStar.Seq.Base.seq",
"FStar.IntegerIntervals.under",
"FStar.Seq.Base.length"
] | [] | (*
Copyright 2021-2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: N. Swamy, A. Rastogi, A. Rozanov
*)
module FStar.Seq.Permutation
open FStar.Seq
open FStar.IntegerIntervals
(* This module defines a permutation on sequences as a bijection among
the sequence indices relating equal elements.
It defines a few utilities to work with such permutations.
Notably:
1. Given two sequence with equal element counts, it constructs a
permutation.
2. Folding the multiplication of a commutative monoid over a
sequence and its permutation produces the equivalent results
*) | false | false | FStar.Seq.Permutation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val index_fun : s: FStar.Seq.Base.seq a -> Type0 | [] | FStar.Seq.Permutation.index_fun | {
"file_name": "ulib/FStar.Seq.Permutation.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | s: FStar.Seq.Base.seq a -> Type0 | {
"end_col": 73,
"end_line": 39,
"start_col": 29,
"start_line": 39
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let foldm_snoc (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:seq a) =
foldr_snoc m.mult s m.unit | let foldm_snoc (#a: Type) (#eq: CE.equiv a) (m: CE.cm a eq) (s: seq a) = | false | null | false | foldr_snoc m.mult s m.unit | {
"checked_file": "FStar.Seq.Permutation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Permutation.fsti"
} | [
"total"
] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Seq.Base.seq",
"FStar.Seq.Properties.foldr_snoc",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__unit"
] | [] | (*
Copyright 2021-2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: N. Swamy, A. Rastogi, A. Rozanov
*)
module FStar.Seq.Permutation
open FStar.Seq
open FStar.IntegerIntervals
(* This module defines a permutation on sequences as a bijection among
the sequence indices relating equal elements.
It defines a few utilities to work with such permutations.
Notably:
  1. Given two sequences with equal element counts, it constructs a
permutation.
2. Folding the multiplication of a commutative monoid over a
sequence and its permutation produces the equivalent results
*)
(* A function from the indices of `s` to itself *)
let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s)
(* An abstract predicate defining when an index_fun is a permutation *)
val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop
(* Revealing the interpretation of is_permutation *)
val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0)
: Lemma (is_permutation s0 s1 f <==>
(* lengths of the sequences are the same *)
Seq.length s0 == Seq.length s1 /\
(* f is injective *)
(forall x y. {:pattern f x; f y}
x <> y ==> f x <> f y) /\
(* and f relates equal items in s0 and s1 *)
(forall (i:nat{i < Seq.length s0}).{:pattern (Seq.index s1 (f i))}
Seq.index s0 i == Seq.index s1 (f i)))
(* A seqperm is an index_fun that is also a permutation *)
let seqperm (#a:Type) (s0:seq a) (s1:seq a) =
f:index_fun s0 { is_permutation s0 s1 f }
(* We can construct a permutation from
sequences whose element counts are the same *)
val permutation_from_equal_counts
(#a:eqtype)
(s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)})
: Tot (seqperm s0 s1)
(** Now, some utilities related to commutative monoids and permutations *)
module CE = FStar.Algebra.CommMonoid.Equiv
(* folding a m.mult over a sequence *) | false | false | FStar.Seq.Permutation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val foldm_snoc : m: FStar.Algebra.CommMonoid.Equiv.cm a eq -> s: FStar.Seq.Base.seq a -> a | [] | FStar.Seq.Permutation.foldm_snoc | {
"file_name": "ulib/FStar.Seq.Permutation.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | m: FStar.Algebra.CommMonoid.Equiv.cm a eq -> s: FStar.Seq.Base.seq a -> a | {
"end_col": 28,
"end_line": 73,
"start_col": 2,
"start_line": 73
} |
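foldm_snoc above is foldr_snoc of the monoid multiplication with the unit as the base case, peeling the last element of the sequence at each step. A minimal Python sketch, assuming the snoc decomposition combines the last element with the fold of the remaining prefix.

```python
def foldm_snoc(mult, unit, s):
    # Peel off the last element (the snoc view) and fold the prefix, bottoming out at the unit.
    if not s:
        return unit
    *prefix, last = s
    return mult(last, foldm_snoc(mult, unit, prefix))

assert foldm_snoc(lambda x, y: x + y, 0, [1, 2, 3, 4]) == 10
assert foldm_snoc(lambda x, y: x * y, 1, []) == 1
```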
|
Prims.Tot | val func_sum (#a #c #eq: _) (cm: CE.cm c eq) (f g: (a -> c))
: t: (a -> c){forall (x: a). t x == (f x) `cm.mult` (g x)} | [
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let func_sum #a #c #eq (cm: CE.cm c eq) (f g: a -> c)
: t:(a -> c){ forall (x:a). t x == f x `cm.mult` g x }
= fun (x:a) -> cm.mult (f x) (g x) | val func_sum (#a #c #eq: _) (cm: CE.cm c eq) (f g: (a -> c))
: t: (a -> c){forall (x: a). t x == (f x) `cm.mult` (g x)}
let func_sum #a #c #eq (cm: CE.cm c eq) (f: (a -> c)) (g: (a -> c))
: t: (a -> c){forall (x: a). t x == (f x) `cm.mult` (g x)} = | false | null | false | fun (x: a) -> cm.mult (f x) (g x) | {
"checked_file": "FStar.Seq.Permutation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Permutation.fsti"
} | [
"total"
] | [
"FStar.Algebra.CommMonoid.Equiv.equiv",
"FStar.Algebra.CommMonoid.Equiv.cm",
"FStar.Algebra.CommMonoid.Equiv.__proj__CM__item__mult",
"Prims.l_Forall",
"Prims.eq2"
] | [] | (*
Copyright 2021-2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: N. Swamy, A. Rastogi, A. Rozanov
*)
module FStar.Seq.Permutation
open FStar.Seq
open FStar.IntegerIntervals
(* This module defines a permutation on sequences as a bijection among
the sequence indices relating equal elements.
It defines a few utilities to work with such permutations.
Notably:
  1. Given two sequences with equal element counts, it constructs a
permutation.
2. Folding the multiplication of a commutative monoid over a
sequence and its permutation produces the equivalent results
*)
(* A function from the indices of `s` to itself *)
let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s)
(* An abstract predicate defining when an index_fun is a permutation *)
val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop
(* Revealing the interpretation of is_permutation *)
val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0)
: Lemma (is_permutation s0 s1 f <==>
(* lengths of the sequences are the same *)
Seq.length s0 == Seq.length s1 /\
(* f is injective *)
(forall x y. {:pattern f x; f y}
x <> y ==> f x <> f y) /\
(* and f relates equal items in s0 and s1 *)
(forall (i:nat{i < Seq.length s0}).{:pattern (Seq.index s1 (f i))}
Seq.index s0 i == Seq.index s1 (f i)))
(* A seqperm is an index_fun that is also a permutation *)
let seqperm (#a:Type) (s0:seq a) (s1:seq a) =
f:index_fun s0 { is_permutation s0 s1 f }
(* We can construct a permutation from
sequences whose element counts are the same *)
val permutation_from_equal_counts
(#a:eqtype)
(s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)})
: Tot (seqperm s0 s1)
(** Now, some utilities related to commutative monoids and permutations *)
module CE = FStar.Algebra.CommMonoid.Equiv
(* folding a m.mult over a sequence *)
let foldm_snoc (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:seq a) =
foldr_snoc m.mult s m.unit
(* folding over a sequence of units is unit *)
val foldm_snoc_unit_seq (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:Seq.seq a)
: Lemma (requires Seq.equal s (Seq.create (Seq.length s) m.unit))
(ensures eq.eq (foldm_snoc m s) m.unit)
(* folding over a singleton sequence is the sequence element *)
val foldm_snoc_singleton (#a:_) (#eq:_) (m:CE.cm a eq) (x:a)
: Lemma (eq.eq (foldm_snoc m (Seq.create 1 x)) x)
(* folding m over the concatenation of s1 and s2
can be decomposed into a fold over s1 and a fold over s2 *)
val foldm_snoc_append (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a)
: Lemma
(ensures eq.eq (foldm_snoc m (append s1 s2))
(m.mult (foldm_snoc m s1) (foldm_snoc m s2)))
(* folds over concatenated lists are symmetric *)
val foldm_snoc_sym (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a)
: Lemma
(ensures eq.eq (foldm_snoc m (append s1 s2))
(foldm_snoc m (append s2 s1)))
(* And, finally, if s0 and s1 are permutations,
then folding m over them is identical *)
val foldm_snoc_perm (#a:_) (#eq:_)
(m:CE.cm a eq)
(s0:seq a)
(s1:seq a)
(p:seqperm s0 s1)
: Lemma
(ensures eq.eq (foldm_snoc m s0) (foldm_snoc m s1))
/// foldm_snoc_split: This next bit is for a lemma that proves that
/// if the fold is taken over a sequence of sums, it is equal
/// to a sum of folds of the summand sequences
(* This constructs a sequence init function to be used to create
a sequence of function values in a given finite integer range *)
let init_func_from_expr #c (#n0: int) (#nk: not_less_than n0)
(expr: ifrom_ito n0 nk -> c)
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c
= expr (n0+i)
(* CommMonoid-induced pointwise sum of two functions *)
let func_sum #a #c #eq (cm: CE.cm c eq) (f g: a -> c) | false | false | FStar.Seq.Permutation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val func_sum (#a #c #eq: _) (cm: CE.cm c eq) (f g: (a -> c))
: t: (a -> c){forall (x: a). t x == (f x) `cm.mult` (g x)} | [] | FStar.Seq.Permutation.func_sum | {
"file_name": "ulib/FStar.Seq.Permutation.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | cm: FStar.Algebra.CommMonoid.Equiv.cm c eq -> f: (_: a -> c) -> g: (_: a -> c)
-> t: (_: a -> c){forall (x: a). t x == CM?.mult cm (f x) (g x)} | {
"end_col": 36,
"end_line": 124,
"start_col": 4,
"start_line": 124
} |
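func_sum above combines two functions pointwise with the monoid multiplication, so (func_sum cm f g) x is cm.mult (f x) (g x). A minimal Python sketch with integer addition standing in for the monoid operation.

```python
def func_sum(mult, f, g):
    # Pointwise combination: (func_sum cm f g) x == cm.mult (f x) (g x).
    return lambda x: mult(f(x), g(x))

h = func_sum(lambda a, b: a + b, lambda x: x * x, lambda x: 2 * x)
assert [h(x) for x in range(4)] == [0, 3, 8, 15]           # x*x + 2*x
```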
Prims.Tot | val init_func_from_expr
(#c: _)
(#n0: int)
(#nk: not_less_than n0)
(expr: (ifrom_ito n0 nk -> c))
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c | [
{
"abbrev": true,
"full_module": "FStar.Algebra.CommMonoid.Equiv",
"short_module": "CE"
},
{
"abbrev": false,
"full_module": "FStar.IntegerIntervals",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let init_func_from_expr #c (#n0: int) (#nk: not_less_than n0)
(expr: ifrom_ito n0 nk -> c)
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c
= expr (n0+i) | val init_func_from_expr
(#c: _)
(#n0: int)
(#nk: not_less_than n0)
(expr: (ifrom_ito n0 nk -> c))
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c
let init_func_from_expr
#c
(#n0: int)
(#nk: not_less_than n0)
(expr: (ifrom_ito n0 nk -> c))
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c = | false | null | false | expr (n0 + i) | {
"checked_file": "FStar.Seq.Permutation.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Seq.Equiv.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.IntegerIntervals.fst.checked",
"FStar.Algebra.CommMonoid.Equiv.fst.checked"
],
"interface_file": false,
"source_file": "FStar.Seq.Permutation.fsti"
} | [
"total"
] | [
"Prims.int",
"FStar.IntegerIntervals.not_less_than",
"FStar.IntegerIntervals.ifrom_ito",
"FStar.IntegerIntervals.under",
"FStar.IntegerIntervals.closed_interval_size",
"Prims.op_Addition"
] | [] | (*
Copyright 2021-2022 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Authors: N. Swamy, A. Rastogi, A. Rozanov
*)
module FStar.Seq.Permutation
open FStar.Seq
open FStar.IntegerIntervals
(* This module defines a permutation on sequences as a bijection among
the sequence indices relating equal elements.
It defines a few utilities to work with such permutations.
Notably:
  1. Given two sequences with equal element counts, it constructs a
permutation.
2. Folding the multiplication of a commutative monoid over a
sequence and its permutation produces the equivalent results
*)
(* A function from the indices of `s` to itself *)
let index_fun #a (s:seq a) = under (Seq.length s) -> under (Seq.length s)
(* An abstract predicate defining when an index_fun is a permutation *)
val is_permutation (#a:Type) (s0:seq a) (s1:seq a) (f:index_fun s0) : prop
(* Revealing the interpretation of is_permutation *)
val reveal_is_permutation (#a:Type) (s0 s1:seq a) (f:index_fun s0)
: Lemma (is_permutation s0 s1 f <==>
(* lengths of the sequences are the same *)
Seq.length s0 == Seq.length s1 /\
(* f is injective *)
(forall x y. {:pattern f x; f y}
x <> y ==> f x <> f y) /\
(* and f relates equal items in s0 and s1 *)
(forall (i:nat{i < Seq.length s0}).{:pattern (Seq.index s1 (f i))}
Seq.index s0 i == Seq.index s1 (f i)))
(* A seqperm is an index_fun that is also a permutation *)
let seqperm (#a:Type) (s0:seq a) (s1:seq a) =
f:index_fun s0 { is_permutation s0 s1 f }
(* We can construct a permutation from
sequences whose element counts are the same *)
val permutation_from_equal_counts
(#a:eqtype)
(s0:seq a) (s1:seq a{(forall x. count x s0 == count x s1)})
: Tot (seqperm s0 s1)
(** Now, some utilities related to commutative monoids and permutations *)
module CE = FStar.Algebra.CommMonoid.Equiv
(* folding a m.mult over a sequence *)
let foldm_snoc (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:seq a) =
foldr_snoc m.mult s m.unit
(* folding over a sequence of units is unit *)
val foldm_snoc_unit_seq (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s:Seq.seq a)
: Lemma (requires Seq.equal s (Seq.create (Seq.length s) m.unit))
(ensures eq.eq (foldm_snoc m s) m.unit)
(* folding over a singleton sequence is the sequence element *)
val foldm_snoc_singleton (#a:_) (#eq:_) (m:CE.cm a eq) (x:a)
: Lemma (eq.eq (foldm_snoc m (Seq.create 1 x)) x)
(* folding m over the concatenation of s1 and s2
can be decomposed into a fold over s1 and a fold over s2 *)
val foldm_snoc_append (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a)
: Lemma
(ensures eq.eq (foldm_snoc m (append s1 s2))
(m.mult (foldm_snoc m s1) (foldm_snoc m s2)))
(* folds over concatenated lists are symmetric *)
val foldm_snoc_sym (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq) (s1 s2: seq a)
: Lemma
(ensures eq.eq (foldm_snoc m (append s1 s2))
(foldm_snoc m (append s2 s1)))
(* And, finally, if s0 and s1 are permutations,
then folding m over them is identical *)
val foldm_snoc_perm (#a:_) (#eq:_)
(m:CE.cm a eq)
(s0:seq a)
(s1:seq a)
(p:seqperm s0 s1)
: Lemma
(ensures eq.eq (foldm_snoc m s0) (foldm_snoc m s1))
/// foldm_snoc_split: This next bit is for a lemma that proves that
/// if the fold is taken over a sequence of sums, it is equal
/// to a sum of folds of the summand sequences
(* This constructs a sequence init function to be used to create
a sequence of function values in a given finite integer range *)
let init_func_from_expr #c (#n0: int) (#nk: not_less_than n0)
(expr: ifrom_ito n0 nk -> c)
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b)) | false | false | FStar.Seq.Permutation.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val init_func_from_expr
(#c: _)
(#n0: int)
(#nk: not_less_than n0)
(expr: (ifrom_ito n0 nk -> c))
(a: ifrom_ito n0 nk)
(b: ifrom_ito a nk)
(i: under (closed_interval_size a b))
: c | [] | FStar.Seq.Permutation.init_func_from_expr | {
"file_name": "ulib/FStar.Seq.Permutation.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
expr: (_: FStar.IntegerIntervals.ifrom_ito n0 nk -> c) ->
a: FStar.IntegerIntervals.ifrom_ito n0 nk ->
b: FStar.IntegerIntervals.ifrom_ito a nk ->
i: FStar.IntegerIntervals.under (FStar.IntegerIntervals.closed_interval_size a b)
-> c | {
"end_col": 15,
"end_line": 119,
"start_col": 4,
"start_line": 119
} |
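
A small client sketch of the FStar.Seq.Permutation interface shown in the record above: the module and definition names (Examples.SeqPermClient, identity_seqperm, fold_invariant_under_perm) are invented for illustration, and the code is a hedged sketch that has not been re-verified with this dataset's toolchain or vconfig.

module Examples.SeqPermClient
open FStar.Seq
open FStar.Seq.Permutation
module CE = FStar.Algebra.CommMonoid.Equiv

(* The identity index function is a permutation of a sequence onto itself:
   reveal_is_permutation exposes the three obligations (equal lengths,
   injectivity, pointwise agreement), all of which are trivial for the
   identity map. *)
let identity_seqperm (#a:Type) (s:seq a) : seqperm s s =
  let f : index_fun s = fun i -> i in
  reveal_is_permutation s s f;
  f

(* Folding a commutative monoid over a sequence is invariant under any
   permutation of it; this wrapper merely invokes the declared lemma
   foldm_snoc_perm, showing how the equivalence and monoid arguments
   thread through client code. *)
let fold_invariant_under_perm (#a:Type) (#eq:CE.equiv a) (m:CE.cm a eq)
      (s0 s1:seq a) (p:seqperm s0 s1)
  : Lemma (eq.eq (foldm_snoc m s0) (foldm_snoc m s1))
  = foldm_snoc_perm m s0 s1 p
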
Prims.Tot | val lte (a b: t) : Tot bool | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) | val lte (a b: t) : Tot bool
let lte (a b: t) : Tot bool = | false | null | false | lte #n (v a) (v b) | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"FStar.Int.lte",
"FStar.Int16.n",
"FStar.Int16.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lte (a b: t) : Tot bool | [] | FStar.Int16.lte | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 51,
"end_line": 118,
"start_col": 33,
"start_line": 118
} |
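
The comparison operators in the FStar.Int16 record above are plain boolean functions with no preconditions, so they can drive ordinary branching code directly. A minimal sketch with invented names (Examples.Int16Compare, min16, max16, clamp), assuming nothing beyond the vals shown in the interface:

module Examples.Int16Compare
open FStar.Int16

(* smaller / larger of two 16-bit machine integers *)
let min16 (a b: t) : t = if lte a b then a else b
let max16 (a b: t) : t = if gte a b then a else b

(* clamp x into [lo, hi]; the caller is assumed to pass lo, hi with lte lo hi *)
let clamp (lo hi x: t) : t =
  if lt x lo then lo
  else if gt x hi then hi
  else x
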
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let n = 16 | let n = | false | null | false | 16 | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val n : Prims.int | [] | FStar.Int16.n | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.int | {
"end_col": 17,
"end_line": 20,
"start_col": 15,
"start_line": 20
} |
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Subtraction_Hat = sub | let op_Subtraction_Hat = | false | null | false | sub | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.sub"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *) | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Subtraction_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Subtraction_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 35,
"end_line": 122,
"start_col": 32,
"start_line": 122
} |
|
Prims.Tot | val eq (a b: t) : Tot bool | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) | val eq (a b: t) : Tot bool
let eq (a b: t) : Tot bool = | false | null | false | eq #n (v a) (v b) | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"FStar.Int.eq",
"FStar.Int16.n",
"FStar.Int16.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c)) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val eq (a b: t) : Tot bool | [] | FStar.Int16.eq | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 49,
"end_line": 114,
"start_col": 32,
"start_line": 114
} |
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Amp_Hat = logand | let op_Amp_Hat = | false | null | false | logand | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.logand"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Amp_Hat : x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Amp_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 30,
"end_line": 127,
"start_col": 24,
"start_line": 127
} |
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Plus_Hat = add | let op_Plus_Hat = | false | null | false | add | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.add"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b) | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Plus_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Plus_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 28,
"end_line": 121,
"start_col": 25,
"start_line": 121
} |
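
Because add and sub in this interface carry size preconditions, a caller must show that the mathematical result stays inside the 16-bit signed range. The sketch below states the needed refinements symbolically through FStar.Int.max_int and FStar.Int.min_int rather than concrete constants; the names Examples.Int16Arith, incr and decr are invented, and the code is a sketch that has not been re-verified here.

module Examples.Int16Arith
open FStar.Int16

(* add requires size (v x + v one) 16; the refinement v x < max_int n,
   together with v one = 1, is intended to discharge that obligation. *)
let incr (x: t { v x < FStar.Int.max_int n }) : t = x +^ one

(* symmetrically, sub requires the difference to stay at or above min_int n *)
let decr (x: t { FStar.Int.min_int n < v x }) : t = x -^ one
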
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Slash_Hat = div | let op_Slash_Hat = | false | null | false | div | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.div"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Slash_Hat : a: FStar.Int16.t -> b: FStar.Int16.t{FStar.Int16.v b <> 0} -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Slash_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t{FStar.Int16.v b <> 0} -> Prims.Pure FStar.Int16.t | {
"end_col": 29,
"end_line": 124,
"start_col": 26,
"start_line": 124
} |
|
Prims.Tot | val lt (a b: t) : Tot bool | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b) | val lt (a b: t) : Tot bool
let lt (a b: t) : Tot bool = | false | null | false | lt #n (v a) (v b) | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"FStar.Int.lt",
"FStar.Int16.n",
"FStar.Int16.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefind *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lt (a b: t) : Tot bool | [] | FStar.Int16.lt | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 49,
"end_line": 117,
"start_col": 32,
"start_line": 117
} |
Prims.Tot | val gt (a b: t) : Tot bool | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b) | val gt (a b: t) : Tot bool
let gt (a b: t) : Tot bool = | false | null | false | gt #n (v a) (v b) | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"FStar.Int.gt",
"FStar.Int16.n",
"FStar.Int16.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gt (a b: t) : Tot bool | [] | FStar.Int16.gt | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 49,
"end_line": 115,
"start_col": 32,
"start_line": 115
} |
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Hat_Hat = logxor | let op_Hat_Hat = | false | null | false | logxor | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.logxor"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Hat_Hat : x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Hat_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 30,
"end_line": 126,
"start_col": 24,
"start_line": 126
} |
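
The bitwise operations (logand, logxor, logor, lognot) have trivial preconditions, and the arithmetic right shift only bounds the shift amount, so the corresponding infix and named forms compose freely. Another illustrative sketch with invented names (Examples.Int16Bits, select_bits, toggle_bits, asr1):

module Examples.Int16Bits
open FStar.Int16

(* keep only the bits selected by mask *)
let select_bits (x mask: t) : t = x &^ mask

(* flip the bits selected by mask *)
let toggle_bits (x mask: t) : t = x ^^ mask

(* arithmetic shift right by one; the only obligation is that the
   shift amount 1 is below n = 16 *)
let asr1 (x: t) : t = shift_arithmetic_right x 1ul
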
|
Prims.Tot | val gte (a b: t) : Tot bool | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b) | val gte (a b: t) : Tot bool
let gte (a b: t) : Tot bool = | false | null | false | gte #n (v a) (v b) | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"FStar.Int.gte",
"FStar.Int16.n",
"FStar.Int16.v",
"Prims.bool"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b) | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val gte (a b: t) : Tot bool | [] | FStar.Int16.gte | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 51,
"end_line": 116,
"start_col": 33,
"start_line": 116
} |
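
This record captures `FStar.Int16.gte`, the total boolean comparison backing the `>=^` notation defined later in the same file. A hypothetical usage sketch (module name made up, not part of the dataset):

```fstar
module Int16GteSketch

open FStar.Int16

(* gte compares the underlying mathematical values v a and v b;
   it is Tot, so it can appear in specifications as well as in code. *)
let check  : bool = gte 7s 3s    // true
let check' : bool = 7s >=^ 3s    // the same call via the infix notation
```
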
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Star_Hat = mul | let op_Star_Hat = | false | null | false | mul | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.mul"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Star_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Star_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 28,
"end_line": 123,
"start_col": 25,
"start_line": 123
} |
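
`op_Star_Hat` is the `*^` notation for `mul`, whose `Pure` precondition `size (v a * v b) n` obliges the caller to rule out 16-bit overflow. A small sketch of what that means in practice (invented module name; the constants are chosen so the product stays below 32767 and the solver can discharge the obligation automatically):

```fstar
module Int16MulSketch

open FStar.Int16

(* 100 * 25 = 2500 fits in [-32768, 32767], so the size precondition holds. *)
let area : t = 100s *^ 25s      // equivalently: mul 100s 25s
```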
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Less_Equals_Hat = lte | let op_Less_Equals_Hat = | false | null | false | lte | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.lte"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right
unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right
unfold let op_Equals_Hat = eq
unfold let op_Greater_Hat = gt
unfold let op_Greater_Equals_Hat = gte | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Less_Equals_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | [] | FStar.Int16.op_Less_Equals_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 35,
"end_line": 136,
"start_col": 32,
"start_line": 136
} |
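
The record above is `op_Less_Equals_Hat`, the `<=^` notation over `lte`. As a quick, hypothetical illustration (nothing below is drawn from the dataset except the operator itself):

```fstar
module Int16LteSketch

open FStar.Int16

(* lte is total: no precondition, it simply reflects v x <= v bound. *)
let at_most (x bound: t) : bool = x <=^ bound

let ok : bool = at_most 5s 9s   // true
```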
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Equals_Hat = eq | let op_Equals_Hat = | false | null | false | eq | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.eq"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Equals_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | [] | FStar.Int16.op_Equals_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 29,
"end_line": 132,
"start_col": 27,
"start_line": 132
} |
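
`op_Equals_Hat` is the boolean `=^` test built on `eq`. Because `v` is injective (`v_inj` in the quoted interface), `a =^ b` agrees with propositional equality on `t`. A made-up two-line sketch:

```fstar
module Int16EqSketch

open FStar.Int16

let same : bool = 42s =^ 42s   // true
let diff : bool = 1s  =^ 2s    // false
```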
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Greater_Greater_Hat = shift_right | let op_Greater_Greater_Hat = | false | null | false | shift_right | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.shift_right"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Greater_Greater_Hat : a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Greater_Greater_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | {
"end_col": 47,
"end_line": 130,
"start_col": 36,
"start_line": 130
} |
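
`op_Greater_Greater_Hat` is the logical right shift `>>^`. Two details of the quoted signature are easy to miss: the operand must be non-negative (otherwise the result is implementation-defined) and the shift count is a `FStar.UInt32.t` strictly below 16. A minimal sketch with invented names:

```fstar
module Int16ShrSketch

open FStar.Int16

(* 12 >> 2 = 3; the count 2ul is a UInt32 literal, and 0 <= 12 /\ 2 < 16
   discharges the precondition. *)
let three : t = 12s >>^ 2ul
```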
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Bar_Hat = logor | let op_Bar_Hat = | false | null | false | logor | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.logor"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Bar_Hat : x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Bar_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: FStar.Int16.t -> y: FStar.Int16.t -> Prims.Pure FStar.Int16.t | {
"end_col": 29,
"end_line": 128,
"start_col": 24,
"start_line": 128
} |
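
`op_Bar_Hat` is the `|^` notation for `logor`, which carries no precondition. A hypothetical flag-combining sketch:

```fstar
module Int16OrSketch

open FStar.Int16

(* 1, 2 and 8 have disjoint bits, so or-ing them yields 11. *)
let flags : t = (1s |^ 2s) |^ 8s   // equivalently: logor (logor 1s 2s) 8s
```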
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Greater_Greater_Greater_Hat = shift_arithmetic_right | let op_Greater_Greater_Greater_Hat = | false | null | false | shift_arithmetic_right | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.shift_arithmetic_right"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Greater_Greater_Greater_Hat : a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Greater_Greater_Greater_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | {
"end_col": 66,
"end_line": 131,
"start_col": 44,
"start_line": 131
} |
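
`op_Greater_Greater_Greater_Hat` is the arithmetic right shift `>>>^`. Unlike `>>^` it accepts negative operands and only requires the count to be below 16; the sign bit is preserved. A sketch under those assumptions (the value -8 is built with `-^` rather than a negative literal to keep the example conservative):

```fstar
module Int16AsrSketch

open FStar.Int16

let minus_eight : t = 0s -^ 8s               // v minus_eight = -8
let minus_four  : t = minus_eight >>>^ 1ul   // arithmetic shift keeps the sign: -4
```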
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Percent_Hat = rem | let op_Percent_Hat = | false | null | false | rem | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.rem"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Percent_Hat : a: FStar.Int16.t -> b: FStar.Int16.t{FStar.Int16.v b <> 0} -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Percent_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t{FStar.Int16.v b <> 0} -> Prims.Pure FStar.Int16.t | {
"end_col": 31,
"end_line": 125,
"start_col": 28,
"start_line": 125
} |
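
`op_Percent_Hat` is `%^`, the notation for `rem`. The divisor type `b:t{v b <> 0}` rules out division by zero at the type level, and the `size (v a / v b) n` precondition mirrors the overflow caveat on `div`. A small invented example:

```fstar
module Int16RemSketch

open FStar.Int16

(* 17 % 5 = 2; both the non-zero refinement and the size condition are
   immediate for these constants. *)
let two : t = 17s %^ 5s
```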
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Greater_Hat = gt | let op_Greater_Hat = | false | null | false | gt | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.gt"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right
unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Greater_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | [] | FStar.Int16.op_Greater_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 30,
"end_line": 133,
"start_col": 28,
"start_line": 133
} |
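
`op_Greater_Hat` is the strict comparison `>^` over `gt`. A one-line hypothetical usage:

```fstar
module Int16GtSketch

open FStar.Int16

let strictly_bigger : bool = 9s >^ 4s   // true
```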
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Less_Hat = lt | let op_Less_Hat = | false | null | false | lt | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.lt"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right
unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right
unfold let op_Equals_Hat = eq
unfold let op_Greater_Hat = gt | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Less_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | [] | FStar.Int16.op_Less_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 27,
"end_line": 135,
"start_col": 25,
"start_line": 135
} |
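
Finally for this group, `op_Less_Hat` is the strict `<^` over `lt`. Because the comparison is total, it is handy for guarding partial operations; the sketch below (invented module and function names) bumps a value only when there is room, which is what makes the `+^` precondition provable. It should go through with default solver settings, though that has not been re-checked here.

```fstar
module Int16LtSketch

open FStar.Int16

let bump (x: t) : t =
  if x <^ 32767s then x +^ 1s   // the guard gives v x < 32767, so no overflow
  else x
```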
|
Prims.Pure | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Less_Less_Hat = shift_left | let op_Less_Less_Hat = | false | null | false | shift_left | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [] | [
"FStar.Int16.shift_left"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Less_Less_Hat : a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | [] | FStar.Int16.op_Less_Less_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> s: FStar.UInt32.t -> Prims.Pure FStar.Int16.t | {
"end_col": 40,
"end_line": 129,
"start_col": 30,
"start_line": 129
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Greater_Equals_Hat = gte | let op_Greater_Equals_Hat = | false | null | false | gte | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.gte"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right
unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right
unfold let op_Equals_Hat = eq | false | true | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Greater_Equals_Hat : a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | [] | FStar.Int16.op_Greater_Equals_Hat | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t -> b: FStar.Int16.t -> Prims.bool | {
"end_col": 38,
"end_line": 134,
"start_col": 35,
"start_line": 134
} |
|
Prims.Tot | val ct_abs (a: t{min_int n < v a}) : Tot (b: t{v b = abs (v a)}) | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Int",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ct_abs (a:t{min_int n < v a}) : Tot (b:t{v b = abs (v a)}) =
let mask = a >>>^ UInt32.uint_to_t (n - 1) in
if 0 <= v a then
begin
sign_bit_positive (v a);
nth_lemma (v mask) (FStar.Int.zero _);
logxor_lemma_1 (v a)
end
else
begin
sign_bit_negative (v a);
nth_lemma (v mask) (ones _);
logxor_lemma_2 (v a);
lognot_negative (v a);
UInt.lemma_lognot_value #n (to_uint (v a))
end;
(a ^^ mask) -^ mask | val ct_abs (a: t{min_int n < v a}) : Tot (b: t{v b = abs (v a)})
let ct_abs (a: t{min_int n < v a}) : Tot (b: t{v b = abs (v a)}) = | false | null | false | let mask = a >>>^ UInt32.uint_to_t (n - 1) in
if 0 <= v a
then
(sign_bit_positive (v a);
nth_lemma (v mask) (FStar.Int.zero _);
logxor_lemma_1 (v a))
else
(sign_bit_negative (v a);
nth_lemma (v mask) (ones _);
logxor_lemma_2 (v a);
lognot_negative (v a);
UInt.lemma_lognot_value #n (to_uint (v a)));
(a ^^ mask) -^ mask | {
"checked_file": "FStar.Int16.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Int.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Int16.fsti"
} | [
"total"
] | [
"FStar.Int16.t",
"Prims.b2t",
"Prims.op_LessThan",
"FStar.Int.min_int",
"FStar.Int16.n",
"FStar.Int16.v",
"FStar.Int16.op_Subtraction_Hat",
"FStar.Int16.op_Hat_Hat",
"Prims.unit",
"Prims.op_LessThanOrEqual",
"FStar.Int.logxor_lemma_1",
"FStar.Int.nth_lemma",
"FStar.Int.zero",
"FStar.Int.sign_bit_positive",
"Prims.bool",
"FStar.UInt.lemma_lognot_value",
"FStar.Int.to_uint",
"FStar.Int.lognot_negative",
"FStar.Int.logxor_lemma_2",
"FStar.Int.ones",
"FStar.Int.sign_bit_negative",
"FStar.Int16.op_Greater_Greater_Greater_Hat",
"FStar.UInt32.uint_to_t",
"Prims.op_Subtraction",
"Prims.op_Equality",
"Prims.int",
"Prims.abs"
] | [] | (*
Copyright 2008-2019 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Int16
(**** THIS MODULE IS GENERATED AUTOMATICALLY USING [mk_int.sh], DO NOT EDIT DIRECTLY ****)
unfold let n = 16
open FStar.Int
open FStar.Mul
#set-options "--max_fuel 0 --max_ifuel 0"
(* NOTE: anything that you fix/update here should be reflected in [FStar.UIntN.fstp], which is mostly
* a copy-paste of this module. *)
new val t : eqtype
val v (x:t) : Tot (int_t n)
val int_to_t: x:int_t n -> Pure t
(requires True)
(ensures (fun y -> v y = x))
val uv_inv (x : t) : Lemma
(ensures (int_to_t (v x) == x))
[SMTPat (v x)]
val vu_inv (x : int_t n) : Lemma
(ensures (v (int_to_t x) == x))
[SMTPat (int_to_t x)]
val v_inj (x1 x2: t): Lemma
(requires (v x1 == v x2))
(ensures (x1 == x2))
val zero : x:t{v x = 0}
val one : x:t{v x = 1}
val add (a:t) (b:t) : Pure t
(requires (size (v a + v b) n))
(ensures (fun c -> v a + v b = v c))
(* Subtraction primitives *)
val sub (a:t) (b:t) : Pure t
(requires (size (v a - v b) n))
(ensures (fun c -> v a - v b = v c))
(* Multiplication primitives *)
val mul (a:t) (b:t) : Pure t
(requires (size (v a * v b) n))
(ensures (fun c -> v a * v b = v c))
(* Division primitives *)
val div (a:t) (b:t{v b <> 0}) : Pure t
// division overflows on INT_MIN / -1
(requires (size (v a / v b) n))
(ensures (fun c -> v a / v b = v c))
(* Modulo primitives *)
(* If a/b is not representable the result of a%b is undefined *)
val rem (a:t) (b:t{v b <> 0}) : Pure t
(requires (size (v a / v b) n))
(ensures (fun c -> FStar.Int.mod (v a) (v b) = v c))
(* Bitwise operators *)
val logand (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logand` v y = v z))
val logxor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logxor` v y == v z))
val logor (x:t) (y:t) : Pure t
(requires True)
(ensures (fun z -> v x `logor` v y == v z))
val lognot (x:t) : Pure t
(requires True)
(ensures (fun z -> lognot (v x) == v z))
(* Shift operators *)
(** If a is negative the result is implementation-defined *)
val shift_right (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_right (v a) (UInt32.v s) = v c))
(** If a is negative or a * pow2 s is not representable the result is undefined *)
val shift_left (a:t) (s:UInt32.t) : Pure t
(requires (0 <= v a /\ v a * pow2 (UInt32.v s) <= max_int n /\ UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_left (v a) (UInt32.v s) = v c))
val shift_arithmetic_right (a:t) (s:UInt32.t) : Pure t
(requires (UInt32.v s < n))
(ensures (fun c -> FStar.Int.shift_arithmetic_right (v a) (UInt32.v s) = v c))
(* Comparison operators *)
let eq (a:t) (b:t) : Tot bool = eq #n (v a) (v b)
let gt (a:t) (b:t) : Tot bool = gt #n (v a) (v b)
let gte (a:t) (b:t) : Tot bool = gte #n (v a) (v b)
let lt (a:t) (b:t) : Tot bool = lt #n (v a) (v b)
let lte (a:t) (b:t) : Tot bool = lte #n (v a) (v b)
(* Infix notations *)
unfold let op_Plus_Hat = add
unfold let op_Subtraction_Hat = sub
unfold let op_Star_Hat = mul
unfold let op_Slash_Hat = div
unfold let op_Percent_Hat = rem
unfold let op_Hat_Hat = logxor
unfold let op_Amp_Hat = logand
unfold let op_Bar_Hat = logor
unfold let op_Less_Less_Hat = shift_left
unfold let op_Greater_Greater_Hat = shift_right
unfold let op_Greater_Greater_Greater_Hat = shift_arithmetic_right
unfold let op_Equals_Hat = eq
unfold let op_Greater_Hat = gt
unfold let op_Greater_Equals_Hat = gte
unfold let op_Less_Hat = lt
unfold let op_Less_Equals_Hat = lte | false | false | FStar.Int16.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ct_abs (a: t{min_int n < v a}) : Tot (b: t{v b = abs (v a)}) | [] | FStar.Int16.ct_abs | {
"file_name": "ulib/FStar.Int16.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: FStar.Int16.t{FStar.Int.min_int FStar.Int16.n < FStar.Int16.v a}
-> b: FStar.Int16.t{FStar.Int16.v b = Prims.abs (FStar.Int16.v a)} | {
"end_col": 21,
"end_line": 155,
"start_col": 64,
"start_line": 139
} |
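Note on the ct_abs record above: the definition is the standard branch-free absolute value for two's-complement integers. Since n = 16, the arithmetic shift a >>>^ 15 replicates the sign bit, so mask is 0 when v a >= 0 and -1 (all ones) when v a < 0; the cited lemmas (sign_bit_positive, sign_bit_negative, the logxor lemmas) justify that (a ^^ mask) -^ mask equals abs (v a). A worked 16-bit sketch with illustrative values not taken from the record:

    a =  5 (0x0005): mask = 0x0000; (0x0005 xor 0x0000) - 0    = 5
    a = -5 (0xFFFB): mask = 0xFFFF; (0xFFFB xor 0xFFFF) - (-1) = 4 + 1 = 5

The refinement min_int n < v a excludes -32768, whose absolute value is not representable in FStar.Int16.t.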
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop | let vprop_typing (g: env) (t: term) = | false | null | false | tot_typing g t tm_vprop | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Syntax.Base.term",
"Pulse.Typing.tot_typing",
"Pulse.Syntax.Base.tm_vprop"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs | false | true | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vprop_typing : g: Pulse.Typing.Env.env -> t: Pulse.Syntax.Base.term -> Type0 | [] | Pulse.Checker.Prover.Base.vprop_typing | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | g: Pulse.Typing.Env.env -> t: Pulse.Syntax.Base.term -> Type0 | {
"end_col": 59,
"end_line": 12,
"start_col": 36,
"start_line": 12
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Star = tm_star | let op_Star = | false | null | false | tm_star | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Syntax.Base.tm_star"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
}
let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss | false | true | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Star : l: Pulse.Syntax.Base.vprop -> r: Pulse.Syntax.Base.vprop -> Pulse.Syntax.Base.term | [] | Pulse.Checker.Prover.Base.op_Star | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | l: Pulse.Syntax.Base.vprop -> r: Pulse.Syntax.Base.vprop -> Pulse.Syntax.Base.term | {
"end_col": 21,
"end_line": 59,
"start_col": 14,
"start_line": 59
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss | let op_Array_Access (ss: PS.ss_t) (t: term) = | false | null | false | PS.ss_term t ss | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.ss_term"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
} | false | true | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_Array_Access : ss: Pulse.Checker.Prover.Substs.ss_t -> t: Pulse.Syntax.Base.term -> Pulse.Syntax.Base.term | [] | Pulse.Checker.Prover.Base.op_Array_Access | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | ss: Pulse.Checker.Prover.Substs.ss_t -> t: Pulse.Syntax.Base.term -> Pulse.Syntax.Base.term | {
"end_col": 17,
"end_line": 57,
"start_col": 2,
"start_line": 57
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_terminal (#preamble:_) (st:prover_state preamble) =
st.unsolved == [] | let is_terminal (#preamble: _) (st: prover_state preamble) = | false | null | false | st.unsolved == [] | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Checker.Prover.Base.preamble",
"Pulse.Checker.Prover.Base.prover_state",
"Prims.eq2",
"Prims.list",
"Pulse.Syntax.Base.vprop",
"Pulse.Checker.Prover.Base.__proj__Mkprover_state__item__unsolved",
"Prims.Nil",
"Prims.logical"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
}
let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss
let op_Star = tm_star
noeq type prover_state (preamble:preamble) = {
pg : g:env { g `env_extends` preamble.g0 };
remaining_ctxt : list vprop;
remaining_ctxt_frame_typing : vprop_typing pg (list_as_vprop remaining_ctxt * preamble.frame);
uvs : uvs:env { disjoint uvs pg };
ss : PS.ss_t;
solved : vprop;
unsolved : list vprop;
k : continuation_elaborator preamble.g0 (preamble.ctxt * preamble.frame)
pg ((list_as_vprop remaining_ctxt * preamble.frame) * ss.(solved));
goals_inv : vprop_equiv (push_env pg uvs) preamble.goals (list_as_vprop unsolved * solved);
solved_inv : squash (freevars ss.(solved) `Set.subset` dom pg);
} | false | false | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_terminal : st: Pulse.Checker.Prover.Base.prover_state preamble -> Prims.logical | [] | Pulse.Checker.Prover.Base.is_terminal | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | st: Pulse.Checker.Prover.Base.prover_state preamble -> Prims.logical | {
"end_col": 19,
"end_line": 81,
"start_col": 2,
"start_line": 81
} |
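Note on the prover_state and is_terminal records above: is_terminal only checks that the unsolved list is empty. In that case the invariant goals_inv, which states vprop_equiv (push_env pg uvs) preamble.goals (list_as_vprop unsolved * solved), specializes roughly to

    preamble.goals  ~  list_as_vprop [] * solved  ~  solved

assuming list_as_vprop [] is the empty vprop. Combined with the continuation elaborator k, which already rewrites the initial (ctxt * frame) into (list_as_vprop remaining_ctxt * frame) * ss.(solved), a terminal state is one in which every goal has been matched into solved under the substitution ss.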
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pst_extends (#preamble:_) (pst1 pst2:prover_state preamble) =
pst1.pg `env_extends` pst2.pg /\
pst1.uvs `env_extends` pst2.uvs /\
pst1.ss `ss_extends` pst2.ss | let pst_extends (#preamble: _) (pst1 pst2: prover_state preamble) = | false | null | false | pst1.pg `env_extends` pst2.pg /\ pst1.uvs `env_extends` pst2.uvs /\ pst1.ss `ss_extends` pst2.ss | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Checker.Prover.Base.preamble",
"Pulse.Checker.Prover.Base.prover_state",
"Prims.l_and",
"Pulse.Typing.Env.env_extends",
"Pulse.Checker.Prover.Base.__proj__Mkprover_state__item__pg",
"Pulse.Checker.Prover.Base.__proj__Mkprover_state__item__uvs",
"Pulse.Checker.Prover.Base.ss_extends",
"Pulse.Checker.Prover.Base.__proj__Mkprover_state__item__ss",
"Prims.logical"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
}
let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss
let op_Star = tm_star
noeq type prover_state (preamble:preamble) = {
pg : g:env { g `env_extends` preamble.g0 };
remaining_ctxt : list vprop;
remaining_ctxt_frame_typing : vprop_typing pg (list_as_vprop remaining_ctxt * preamble.frame);
uvs : uvs:env { disjoint uvs pg };
ss : PS.ss_t;
solved : vprop;
unsolved : list vprop;
k : continuation_elaborator preamble.g0 (preamble.ctxt * preamble.frame)
pg ((list_as_vprop remaining_ctxt * preamble.frame) * ss.(solved));
goals_inv : vprop_equiv (push_env pg uvs) preamble.goals (list_as_vprop unsolved * solved);
solved_inv : squash (freevars ss.(solved) `Set.subset` dom pg);
}
let is_terminal (#preamble:_) (st:prover_state preamble) =
st.unsolved == []
irreducible
let extend_post_hint_opt_g (g:env) (post_hint:post_hint_opt g) (g1:env { g1 `env_extends` g })
: p:post_hint_opt g1 { p == post_hint } =
match post_hint with
| None -> None
| Some post_hint ->
assert (g `env_extends` post_hint.g);
assert (g1 `env_extends` g);
assert (g1 `env_extends` post_hint.g);
Some post_hint
let ss_extends (ss1 ss2:PS.ss_t) =
Set.subset (PS.dom ss2) (PS.dom ss1) /\
(forall (x:var). PS.contains ss2 x ==> PS.sel ss1 x == PS.sel ss2 x) | false | false | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pst_extends : pst1: Pulse.Checker.Prover.Base.prover_state preamble ->
pst2: Pulse.Checker.Prover.Base.prover_state preamble
-> Prims.logical | [] | Pulse.Checker.Prover.Base.pst_extends | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
pst1: Pulse.Checker.Prover.Base.prover_state preamble ->
pst2: Pulse.Checker.Prover.Base.prover_state preamble
-> Prims.logical | {
"end_col": 30,
"end_line": 101,
"start_col": 2,
"start_line": 99
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let ss_extends (ss1 ss2:PS.ss_t) =
Set.subset (PS.dom ss2) (PS.dom ss1) /\
(forall (x:var). PS.contains ss2 x ==> PS.sel ss1 x == PS.sel ss2 x) | let ss_extends (ss1 ss2: PS.ss_t) = | false | null | false | Set.subset (PS.dom ss2) (PS.dom ss1) /\
(forall (x: var). PS.contains ss2 x ==> PS.sel ss1 x == PS.sel ss2 x) | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Checker.Prover.Substs.ss_t",
"Prims.l_and",
"FStar.Set.subset",
"Pulse.Syntax.Base.var",
"Pulse.Checker.Prover.Substs.dom",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.b2t",
"Pulse.Checker.Prover.Substs.contains",
"Prims.eq2",
"Pulse.Syntax.Base.term",
"Pulse.Checker.Prover.Substs.sel",
"Prims.logical"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
}
let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss
let op_Star = tm_star
noeq type prover_state (preamble:preamble) = {
pg : g:env { g `env_extends` preamble.g0 };
remaining_ctxt : list vprop;
remaining_ctxt_frame_typing : vprop_typing pg (list_as_vprop remaining_ctxt * preamble.frame);
uvs : uvs:env { disjoint uvs pg };
ss : PS.ss_t;
solved : vprop;
unsolved : list vprop;
k : continuation_elaborator preamble.g0 (preamble.ctxt * preamble.frame)
pg ((list_as_vprop remaining_ctxt * preamble.frame) * ss.(solved));
goals_inv : vprop_equiv (push_env pg uvs) preamble.goals (list_as_vprop unsolved * solved);
solved_inv : squash (freevars ss.(solved) `Set.subset` dom pg);
}
let is_terminal (#preamble:_) (st:prover_state preamble) =
st.unsolved == []
irreducible
let extend_post_hint_opt_g (g:env) (post_hint:post_hint_opt g) (g1:env { g1 `env_extends` g })
: p:post_hint_opt g1 { p == post_hint } =
match post_hint with
| None -> None
| Some post_hint ->
assert (g `env_extends` post_hint.g);
assert (g1 `env_extends` g);
assert (g1 `env_extends` post_hint.g);
Some post_hint | false | true | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val ss_extends : ss1: Pulse.Checker.Prover.Substs.ss_t -> ss2: Pulse.Checker.Prover.Substs.ss_t -> Prims.logical | [] | Pulse.Checker.Prover.Base.ss_extends | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | ss1: Pulse.Checker.Prover.Substs.ss_t -> ss2: Pulse.Checker.Prover.Substs.ss_t -> Prims.logical | {
"end_col": 70,
"end_line": 96,
"start_col": 2,
"start_line": 95
} |
|
Prims.Tot | val extend_post_hint_opt_g (g: env) (post_hint: post_hint_opt g) (g1: env{g1 `env_extends` g})
: p: post_hint_opt g1 {p == post_hint} | [
{
"abbrev": true,
"full_module": "Pulse.Checker.VPropEquiv",
"short_module": "VP"
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.Checker.Prover.Substs",
"short_module": "PS"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing.Combinators",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Typing",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Syntax",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Checker.Prover",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let extend_post_hint_opt_g (g:env) (post_hint:post_hint_opt g) (g1:env { g1 `env_extends` g })
: p:post_hint_opt g1 { p == post_hint } =
match post_hint with
| None -> None
| Some post_hint ->
assert (g `env_extends` post_hint.g);
assert (g1 `env_extends` g);
assert (g1 `env_extends` post_hint.g);
Some post_hint | val extend_post_hint_opt_g (g: env) (post_hint: post_hint_opt g) (g1: env{g1 `env_extends` g})
: p: post_hint_opt g1 {p == post_hint}
let extend_post_hint_opt_g (g: env) (post_hint: post_hint_opt g) (g1: env{g1 `env_extends` g})
: p: post_hint_opt g1 {p == post_hint} = | false | null | false | match post_hint with
| None -> None
| Some post_hint ->
assert (g `env_extends` post_hint.g);
assert (g1 `env_extends` g);
assert (g1 `env_extends` post_hint.g);
Some post_hint | {
"checked_file": "Pulse.Checker.Prover.Base.fsti.checked",
"dependencies": [
"Pulse.Typing.Combinators.fsti.checked",
"Pulse.Typing.fst.checked",
"Pulse.Syntax.fst.checked",
"Pulse.Checker.Prover.Substs.fsti.checked",
"Pulse.Checker.Base.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Set.fsti.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Checker.Prover.Base.fsti"
} | [
"total"
] | [
"Pulse.Typing.Env.env",
"Pulse.Typing.post_hint_opt",
"Pulse.Typing.Env.env_extends",
"FStar.Pervasives.Native.None",
"Pulse.Typing.post_hint_t",
"FStar.Pervasives.Native.Some",
"Prims.unit",
"Prims._assert",
"Pulse.Typing.__proj__Mkpost_hint_t__item__g",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"Prims.l_or",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_None",
"Pulse.Typing.post_hint_for_env_p",
"FStar.Pervasives.Native.__proj__Some__item__v"
] | [] | module Pulse.Checker.Prover.Base
open Pulse.Syntax
open Pulse.Typing
open Pulse.Typing.Combinators
open Pulse.Checker.Base
module T = FStar.Tactics.V2
module PS = Pulse.Checker.Prover.Substs
let vprop_typing (g:env) (t:term) = tot_typing g t tm_vprop
//
// Scaffolding for adding elims
//
// Given a function f : vprop -> T.Tac bool that decides whether a vprop
// should be elim-ed,
// and an mk function to create the elim term, comp, and typing,
// add_elims will create a continuation_elaborator
//
type mk_t =
#g:env ->
#v:vprop ->
tot_typing g v tm_vprop ->
T.Tac (option (x:ppname &
t:st_term &
c:comp { stateful_comp c /\ comp_pre c == v } &
st_typing g t c))
val add_elims (#g:env) (#ctxt:term) (#frame:term)
(f:vprop -> T.Tac bool)
(mk:mk_t)
(ctxt_typing:tot_typing g (tm_star ctxt frame) tm_vprop)
(uvs:env { disjoint uvs g })
: T.Tac (g':env { env_extends g' g /\ disjoint uvs g' } &
ctxt':term &
tot_typing g' (tm_star ctxt' frame) tm_vprop &
continuation_elaborator g (tm_star ctxt frame) g' (tm_star ctxt' frame))
//
// Prover state
//
noeq type preamble = {
g0 : env;
ctxt : vprop;
frame : vprop;
ctxt_frame_typing : vprop_typing g0 (tm_star ctxt frame);
goals : vprop;
}
let op_Array_Access (ss:PS.ss_t) (t:term) =
PS.ss_term t ss
let op_Star = tm_star
noeq type prover_state (preamble:preamble) = {
pg : g:env { g `env_extends` preamble.g0 };
remaining_ctxt : list vprop;
remaining_ctxt_frame_typing : vprop_typing pg (list_as_vprop remaining_ctxt * preamble.frame);
uvs : uvs:env { disjoint uvs pg };
ss : PS.ss_t;
solved : vprop;
unsolved : list vprop;
k : continuation_elaborator preamble.g0 (preamble.ctxt * preamble.frame)
pg ((list_as_vprop remaining_ctxt * preamble.frame) * ss.(solved));
goals_inv : vprop_equiv (push_env pg uvs) preamble.goals (list_as_vprop unsolved * solved);
solved_inv : squash (freevars ss.(solved) `Set.subset` dom pg);
}
let is_terminal (#preamble:_) (st:prover_state preamble) =
st.unsolved == []
irreducible
let extend_post_hint_opt_g (g:env) (post_hint:post_hint_opt g) (g1:env { g1 `env_extends` g }) | false | false | Pulse.Checker.Prover.Base.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val extend_post_hint_opt_g (g: env) (post_hint: post_hint_opt g) (g1: env{g1 `env_extends` g})
: p: post_hint_opt g1 {p == post_hint} | [] | Pulse.Checker.Prover.Base.extend_post_hint_opt_g | {
"file_name": "lib/steel/pulse/Pulse.Checker.Prover.Base.fsti",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
g: Pulse.Typing.Env.env ->
post_hint: Pulse.Typing.post_hint_opt g ->
g1: Pulse.Typing.Env.env{Pulse.Typing.Env.env_extends g1 g}
-> p: Pulse.Typing.post_hint_opt g1 {p == post_hint} | {
"end_col": 18,
"end_line": 92,
"start_col": 2,
"start_line": 86
} |
Prims.Tot | val make_r (key_r: nat128) : nat128 | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let make_r (key_r:nat128) : nat128 =
iand key_r 0x0ffffffc0ffffffc0ffffffc0fffffff | val make_r (key_r: nat128) : nat128
let make_r (key_r: nat128) : nat128 = | false | null | false | iand key_r 0x0ffffffc0ffffffc0ffffffc0fffffff | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Vale.Def.Words_s.nat128",
"Vale.Def.Types_s.iand",
"Vale.Def.Words_s.pow2_128"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"]
let modp (x:int) : int =
x % (pow2_128 * 4 - 5)
[@"opaque_to_smt"]
let mod2_128 (x:int) : int =
x % pow2_128
let rec poly1305_hash_blocks (h pad r:int) (inp:int -> nat128) (k:nat) : int =
if k = 0 then h
else
let hh = poly1305_hash_blocks h pad r inp (k - 1) in
modp ((hh + pad + inp (k - 1)) * r) | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val make_r (key_r: nat128) : nat128 | [] | Vale.Poly1305.Spec_s.make_r | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | key_r: Vale.Def.Words_s.nat128 -> Vale.Def.Words_s.nat128 | {
"end_col": 47,
"end_line": 22,
"start_col": 2,
"start_line": 22
} |
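The `make_r` record above is the Poly1305 key clamp: the r half of the key is AND-ed with the mask 0x0ffffffc0ffffffc0ffffffc0fffffff, which clears the top four bits of every 32-bit limb and the low two bits of the three upper limbs. The following sketch is mine, not part of the dataset; the module name and test value are hypothetical.

```fstar
module MakeRExample  // hypothetical module, not part of the dataset
open Vale.Def.Words_s
open Vale.Poly1305.Spec_s

// Clamping can only clear bits, so clamping an all-ones key should yield the
// mask itself (stated as a comment; proving it would need bitwise lemmas for iand).
let all_ones : nat128 = 0xffffffffffffffffffffffffffffffff
let clamped : nat128 = make_r all_ones
// expected: clamped = 0x0ffffffc0ffffffc0ffffffc0fffffff
```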
Prims.Tot | val poly1305_hash (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let poly1305_hash (key_r key_s:nat128) (inp:int -> nat128) (len:nat) : int =
poly1305_hash_all 0 key_r key_s inp len | val poly1305_hash (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int
let poly1305_hash (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int = | false | null | false | poly1305_hash_all 0 key_r key_s inp len | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Vale.Def.Words_s.nat128",
"Prims.int",
"Prims.nat",
"Vale.Poly1305.Spec_s.poly1305_hash_all"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"]
let modp (x:int) : int =
x % (pow2_128 * 4 - 5)
[@"opaque_to_smt"]
let mod2_128 (x:int) : int =
x % pow2_128
let rec poly1305_hash_blocks (h pad r:int) (inp:int -> nat128) (k:nat) : int =
if k = 0 then h
else
let hh = poly1305_hash_blocks h pad r inp (k - 1) in
modp ((hh + pad + inp (k - 1)) * r)
let make_r (key_r:nat128) : nat128 =
iand key_r 0x0ffffffc0ffffffc0ffffffc0fffffff
let poly1305_hash_all (h:int) (key_r key_s:nat128) (inp:int -> nat128) (len:nat) : int =
let nBlocks = len / 16 in
let nExtra = len % 16 in
let hBlocks = poly1305_hash_blocks h pow2_128 (make_r key_r) inp nBlocks in
if nExtra = 0 then
mod2_128 (hBlocks + key_s)
else
let padLast = pow2 (nExtra * 8) in
let hLast = modp ((hBlocks + padLast + inp nBlocks % padLast) * (make_r key_r)) in
mod2_128 (hLast + key_s) | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val poly1305_hash (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int | [] | Vale.Poly1305.Spec_s.poly1305_hash | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
key_r: Vale.Def.Words_s.nat128 ->
key_s: Vale.Def.Words_s.nat128 ->
inp: (_: Prims.int -> Vale.Def.Words_s.nat128) ->
len: Prims.nat
-> Prims.int | {
"end_col": 41,
"end_line": 36,
"start_col": 2,
"start_line": 36
} |
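`poly1305_hash` is the top-level entry point of the spec; it simply seeds `poly1305_hash_all` with accumulator 0. A small, hypothetical call site follows — the key halves and the `inp` block function are made-up test values, not from the source.

```fstar
module HashExample  // hypothetical module
open Vale.Def.Words_s
open Vale.Poly1305.Spec_s

// A message of one 16-byte block whose little-endian value is 42; all other
// block indices are irrelevant here and map to 0.
let inp (i:int) : nat128 = if i = 0 then 42 else 0
let tag : int = poly1305_hash 0x1 0x2 inp 16
```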
Prims.Tot | val mod2_128 (x: int) : int | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mod2_128 (x:int) : int =
x % pow2_128 | val mod2_128 (x: int) : int
let mod2_128 (x: int) : int = | false | null | false | x % pow2_128 | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Prims.int",
"Prims.op_Modulus",
"Vale.Def.Words_s.pow2_128"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"]
let modp (x:int) : int =
x % (pow2_128 * 4 - 5)
[@"opaque_to_smt"] | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mod2_128 (x: int) : int | [] | Vale.Poly1305.Spec_s.mod2_128 | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.int -> Prims.int | {
"end_col": 14,
"end_line": 13,
"start_col": 2,
"start_line": 13
} |
Prims.Tot | val modp (x: int) : int | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let modp (x:int) : int =
x % (pow2_128 * 4 - 5) | val modp (x: int) : int
let modp (x: int) : int = | false | null | false | x % (pow2_128 * 4 - 5) | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Prims.int",
"Prims.op_Modulus",
"Prims.op_Subtraction",
"FStar.Mul.op_Star",
"Vale.Def.Words_s.pow2_128"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"] | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val modp (x: int) : int | [] | Vale.Poly1305.Spec_s.modp | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Prims.int -> Prims.int | {
"end_col": 24,
"end_line": 9,
"start_col": 2,
"start_line": 9
} |
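`modp` and the earlier `mod2_128` are the two reductions used by the spec: `modp` reduces modulo `pow2_128 * 4 - 5`, which is the Poly1305 prime 2^130 - 5, while `mod2_128` truncates the final tag to 128 bits. A sanity-check sketch (hypothetical module; it assumes `assert_norm` can fully evaluate `pow2_128` and `pow2 130`):

```fstar
module ModpExample  // hypothetical module
open FStar.Mul
open Vale.Def.Words_s

// The modulus used by modp is exactly the Poly1305 prime 2^130 - 5.
let _ = assert_norm (pow2_128 * 4 - 5 = pow2 130 - 5)
```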
Prims.Tot | val poly1305_hash_all (h: int) (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let poly1305_hash_all (h:int) (key_r key_s:nat128) (inp:int -> nat128) (len:nat) : int =
let nBlocks = len / 16 in
let nExtra = len % 16 in
let hBlocks = poly1305_hash_blocks h pow2_128 (make_r key_r) inp nBlocks in
if nExtra = 0 then
mod2_128 (hBlocks + key_s)
else
let padLast = pow2 (nExtra * 8) in
let hLast = modp ((hBlocks + padLast + inp nBlocks % padLast) * (make_r key_r)) in
mod2_128 (hLast + key_s) | val poly1305_hash_all (h: int) (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int
let poly1305_hash_all (h: int) (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int = | false | null | false | let nBlocks = len / 16 in
let nExtra = len % 16 in
let hBlocks = poly1305_hash_blocks h pow2_128 (make_r key_r) inp nBlocks in
if nExtra = 0
then mod2_128 (hBlocks + key_s)
else
let padLast = pow2 (nExtra * 8) in
let hLast = modp ((hBlocks + padLast + inp nBlocks % padLast) * (make_r key_r)) in
mod2_128 (hLast + key_s) | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Prims.int",
"Vale.Def.Words_s.nat128",
"Prims.nat",
"Prims.op_Equality",
"Vale.Poly1305.Spec_s.mod2_128",
"Prims.op_Addition",
"Prims.bool",
"Vale.Poly1305.Spec_s.modp",
"FStar.Mul.op_Star",
"Prims.op_Modulus",
"Vale.Poly1305.Spec_s.make_r",
"Prims.pos",
"Prims.pow2",
"Vale.Poly1305.Spec_s.poly1305_hash_blocks",
"Vale.Def.Words_s.pow2_128",
"Prims.op_Division"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"]
let modp (x:int) : int =
x % (pow2_128 * 4 - 5)
[@"opaque_to_smt"]
let mod2_128 (x:int) : int =
x % pow2_128
let rec poly1305_hash_blocks (h pad r:int) (inp:int -> nat128) (k:nat) : int =
if k = 0 then h
else
let hh = poly1305_hash_blocks h pad r inp (k - 1) in
modp ((hh + pad + inp (k - 1)) * r)
let make_r (key_r:nat128) : nat128 =
iand key_r 0x0ffffffc0ffffffc0ffffffc0fffffff | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val poly1305_hash_all (h: int) (key_r key_s: nat128) (inp: (int -> nat128)) (len: nat) : int | [] | Vale.Poly1305.Spec_s.poly1305_hash_all | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Prims.int ->
key_r: Vale.Def.Words_s.nat128 ->
key_s: Vale.Def.Words_s.nat128 ->
inp: (_: Prims.int -> Vale.Def.Words_s.nat128) ->
len: Prims.nat
-> Prims.int | {
"end_col": 28,
"end_line": 33,
"start_col": 88,
"start_line": 24
} |
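`poly1305_hash_all` splits the message into `len / 16` full blocks, folded by `poly1305_hash_blocks` with pad `pow2_128`, plus an optional partial block of `len % 16` bytes taken modulo and padded at `pow2 (nExtra * 8)`; `key_s` is added once at the end, modulo 2^128. A worked split for a hypothetical 34-byte message:

```fstar
module SplitExample  // hypothetical module
open FStar.Mul

// For len = 34: two full 16-byte blocks, two trailing bytes, and the partial
// block is padded at bit 16 (padLast = pow2 (2 * 8) = 0x10000).
let _ = assert_norm (34 / 16 = 2 /\ 34 % 16 = 2 /\ pow2 (2 * 8) = 0x10000)
```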
Prims.Tot | val poly1305_hash_blocks (h pad r: int) (inp: (int -> nat128)) (k: nat) : int | [
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec poly1305_hash_blocks (h pad r:int) (inp:int -> nat128) (k:nat) : int =
if k = 0 then h
else
let hh = poly1305_hash_blocks h pad r inp (k - 1) in
modp ((hh + pad + inp (k - 1)) * r) | val poly1305_hash_blocks (h pad r: int) (inp: (int -> nat128)) (k: nat) : int
let rec poly1305_hash_blocks (h pad r: int) (inp: (int -> nat128)) (k: nat) : int = | false | null | false | if k = 0
then h
else
let hh = poly1305_hash_blocks h pad r inp (k - 1) in
modp ((hh + pad + inp (k - 1)) * r) | {
"checked_file": "Vale.Poly1305.Spec_s.fst.checked",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Poly1305.Spec_s.fst"
} | [
"total"
] | [
"Prims.int",
"Vale.Def.Words_s.nat128",
"Prims.nat",
"Prims.op_Equality",
"Prims.bool",
"Vale.Poly1305.Spec_s.modp",
"FStar.Mul.op_Star",
"Prims.op_Addition",
"Prims.op_Subtraction",
"Vale.Poly1305.Spec_s.poly1305_hash_blocks"
] | [] | module Vale.Poly1305.Spec_s
open FStar.Mul
open Vale.Def.Words_s
open Vale.Def.Types_s
[@"opaque_to_smt"]
let modp (x:int) : int =
x % (pow2_128 * 4 - 5)
[@"opaque_to_smt"]
let mod2_128 (x:int) : int =
x % pow2_128 | false | true | Vale.Poly1305.Spec_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val poly1305_hash_blocks (h pad r: int) (inp: (int -> nat128)) (k: nat) : int | [
"recursion"
] | Vale.Poly1305.Spec_s.poly1305_hash_blocks | {
"file_name": "vale/specs/crypto/Vale.Poly1305.Spec_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Prims.int ->
pad: Prims.int ->
r: Prims.int ->
inp: (_: Prims.int -> Vale.Def.Words_s.nat128) ->
k: Prims.nat
-> Prims.int | {
"end_col": 39,
"end_line": 19,
"start_col": 2,
"start_line": 16
} |
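`poly1305_hash_blocks` consumes the blocks in increasing index order, adding the pad and the block value before each multiplication by r and reduction with `modp`. Unrolling the recursion for two blocks (informal notes in comments; this sketch is not part of the source):

```fstar
module UnrollNotes  // hypothetical module, comments only
// poly1305_hash_blocks h pad r inp 0 = h
// poly1305_hash_blocks h pad r inp 1 = modp ((h + pad + inp 0) * r)
// poly1305_hash_blocks h pad r inp 2 =
//   modp ((modp ((h + pad + inp 0) * r) + pad + inp 1) * r)
```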
Prims.Tot | val bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y | val bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) = | false | null | false | let y = reveal x in
f y | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) | [] | FStar.Ghost.bind | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: FStar.Ghost.erased a -> f: (_: a -> FStar.Ghost.erased b) -> FStar.Ghost.erased b | {
"end_col": 5,
"end_line": 77,
"start_col": 82,
"start_line": 75
} |
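The records now switch to FStar.Ghost. `bind` sequences erased computations: the value is only `reveal`ed inside a continuation that itself returns an erased result, so nothing computationally relevant escapes. A usage sketch with hypothetical names:

```fstar
module GhostBindExample  // hypothetical module
open FStar.Ghost

// Combine two erased naturals; the sum is itself erased and extracts to unit.
let sum_e (x y : erased nat) : erased nat =
  bind x (fun xv -> bind y (fun yv -> return (xv + yv)))
```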
Prims.GTot | val tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x | val tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = | false | null | false | f x | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"sometrivial"
] | [] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b | [] | FStar.Ghost.tot_to_gtot | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: 'a -> 'b) -> x: 'a -> Prims.GTot 'b | {
"end_col": 59,
"end_line": 69,
"start_col": 56,
"start_line": 69
} |
Prims.Tot | val return (#a: Type) (x: a) : erased a | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let return (#a: Type) (x: a) : erased a = hide x | val return (#a: Type) (x: a) : erased a
let return (#a: Type) (x: a) : erased a = | false | null | false | hide x | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.hide",
"FStar.Ghost.erased"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val return (#a: Type) (x: a) : erased a | [] | FStar.Ghost.return | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: a -> FStar.Ghost.erased a | {
"end_col": 48,
"end_line": 72,
"start_col": 42,
"start_line": 72
} |
Prims.Tot | val op_let_At (x: erased 'a) (f: ('a -> Tot (erased 'b))) : Tot (erased 'b) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f | val op_let_At (x: erased 'a) (f: ('a -> Tot (erased 'b))) : Tot (erased 'b)
let op_let_At (x: erased 'a) (f: ('a -> Tot (erased 'b))) : Tot (erased 'b) = | false | null | false | bind x f | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.bind"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val op_let_At (x: erased 'a) (f: ('a -> Tot (erased 'b))) : Tot (erased 'b) | [] | FStar.Ghost.op_let_At | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | x: FStar.Ghost.erased 'a -> f: (_: 'a -> FStar.Ghost.erased 'b) -> FStar.Ghost.erased 'b | {
"end_col": 81,
"end_line": 80,
"start_col": 73,
"start_line": 80
} |
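`let@` is declared as an F* let-operator and unfolds to `bind`, so erased values can be opened in direct style, exactly as the `elift` definitions later in this file do. A hypothetical example:

```fstar
module GhostLetOpExample  // hypothetical module
open FStar.Ghost

// Direct-style sequencing over erased values via the let@ operator.
let double_e (x : erased int) : erased int =
  let@ v = x in
  return (v + v)
```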
Prims.Tot | val elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx) | val elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)})
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) = | false | null | false | let@ xx = x in
return (f xx) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.op_let_At",
"FStar.Ghost.return",
"Prims.eq2",
"FStar.Ghost.reveal"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) | [] | FStar.Ghost.elift1 | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> Prims.GTot b) -> x: FStar.Ghost.erased a
-> y: FStar.Ghost.erased b {FStar.Ghost.reveal y == f (FStar.Ghost.reveal x)} | {
"end_col": 30,
"end_line": 86,
"start_col": 2,
"start_line": 86
} |
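`elift1` maps a ghost (GTot) function over an erased value, and its result type records that the lift agrees with applying `f` under `reveal`. A usage sketch (module and function names are hypothetical):

```fstar
module GhostElift1Example  // hypothetical module
open FStar.Ghost

// Increment under erasure. elift1's result is refined by reveal y == f (reveal x);
// ascribing plain erased int simply forgets that fact.
let succ_e (x : erased int) : erased int = elift1 (fun (n:int) -> n + 1) x
```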
Prims.Tot | val elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c) | val elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)})
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) = | false | null | false | let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.op_let_At",
"FStar.Ghost.return",
"Prims.eq2",
"FStar.Ghost.reveal"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) | [] | FStar.Ghost.elift3 | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
f: (_: a -> _: b -> _: c -> Prims.GTot d) ->
ga: FStar.Ghost.erased a ->
gb: FStar.Ghost.erased b ->
gc: FStar.Ghost.erased c
-> gd:
FStar.Ghost.erased d
{ FStar.Ghost.reveal gd ==
f (FStar.Ghost.reveal ga) (FStar.Ghost.reveal gb) (FStar.Ghost.reveal gc) } | {
"end_col": 18,
"end_line": 108,
"start_col": 2,
"start_line": 105
} |
Prims.Tot | val elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy) | val elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)})
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) = | false | null | false | let@ xx = x in
let@ yy = y in
return (f xx yy) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.op_let_At",
"FStar.Ghost.return",
"Prims.eq2",
"FStar.Ghost.reveal"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) | [] | FStar.Ghost.elift2 | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | f: (_: a -> _: b -> Prims.GTot c) -> x: FStar.Ghost.erased a -> y: FStar.Ghost.erased b
-> z:
FStar.Ghost.erased c {FStar.Ghost.reveal z == f (FStar.Ghost.reveal x) (FStar.Ghost.reveal y)} | {
"end_col": 18,
"end_line": 94,
"start_col": 2,
"start_line": 92
} |
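`elift2` is the binary analogue, combining two erased values; the `elift3` record just above follows the same pattern with a third argument. A sketch with hypothetical names:

```fstar
module GhostElift2Example  // hypothetical module
open FStar.Ghost

// Add two erased integers; the refinement ties the result to (+) under reveal.
let add_e (x y : erased int) : erased int = elift2 (fun (m n : int) -> m + n) x y
```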
Prims.Tot | val push_refinement (#a: _) (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) =
let x:(x: a{p x}) = reveal r in
return x | val push_refinement (#a: _) (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r})
let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) = | false | null | false | let x:(x: a{p x}) = reveal r in
return x | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal",
"FStar.Ghost.return",
"Prims.l_and",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type is [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] function.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot] F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c)
(** Pushing a refinement type under the [erased] constructor *) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val push_refinement (#a: _) (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) | [] | FStar.Ghost.push_refinement | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | r: FStar.Ghost.erased a {p (FStar.Ghost.reveal r)}
-> FStar.Ghost.erased (x: a{p x /\ x == FStar.Ghost.reveal r}) | {
"end_col": 10,
"end_line": 114,
"start_col": 43,
"start_line": 112
} |
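A minimal usage sketch for the FStar.Ghost record above, assuming only the declarations shown in its file context (hide, reveal, push_refinement); the names ex, ex_is_42 and ex_pos are illustrative, not part of the library.

open FStar.Ghost

(* hide injects a value into erased; reveal is GTot, so it is only usable in ghost positions *)
let ex : erased nat = hide 42

(* discharged by the reveal_hide lemma above, whose SMT pattern fires on hide *)
let ex_is_42 () : Lemma (reveal ex == 42) = ()

(* push_refinement: a refinement known about reveal r becomes a refinement under erased *)
let ex_pos (r: erased nat { reveal r > 0 })
  : erased (x: nat { x > 0 /\ x == reveal r })
  = push_refinement #nat #(fun x -> x > 0) r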
Prims.Tot | val elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x) | val elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)})
let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) = | false | null | false | let x:(x: a{p x}) = reveal r in
return (f x) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal",
"FStar.Ghost.return",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] functions.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot]; F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c)
(** Pushing a refinement type under the [erased] constructor *)
let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) =
let x:(x: a{p x}) = reveal r in
return x
(** Mapping a function with a refined domain over a refined erased value *)
irreducible
let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b)) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) | [] | FStar.Ghost.elift1_p | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | $f: (x: a{p x} -> Prims.GTot b) -> r: FStar.Ghost.erased a {p (FStar.Ghost.reveal r)}
-> z: FStar.Ghost.erased b {FStar.Ghost.reveal z == f (FStar.Ghost.reveal r)} | {
"end_col": 14,
"end_line": 125,
"start_col": 52,
"start_line": 123
} |
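A possible use of the elift1_p combinator from the record above: lifting a function whose domain carries a refinement. pred and epred are illustrative names, assuming a module that opens FStar.Ghost.

open FStar.Ghost

(* pred is only defined on positive naturals, hence the refined domain *)
let pred (x: nat { x > 0 }) : GTot nat = x - 1

(* elift1_p transports pred to erased arguments whose revealed value meets the refinement *)
let epred (r: erased nat { reveal r > 0 }) : erased nat = elift1_p pred r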
Prims.Tot | val elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x) | val elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)})
let elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) = | false | null | false | let x:(x: a{p x}) = reveal r in
return (f x) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal",
"FStar.Ghost.return",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] functions.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot]; F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c)
(** Pushing a refinement type under the [erased] constructor *)
let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) =
let x:(x: a{p x}) = reveal r in
return x
(** Mapping a function with a refined domain over a refined erased value *)
irreducible
let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x)
(** Mapping a binary function with a refined domain over a pair of
refined erased values *)
irreducible
let elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) =
let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y)
(** Mapping a function with a refined domain and co-domain over a
refined erased value producing a refined erased value *)
irreducible
let elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y}))) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) | [] | FStar.Ghost.elift1_pq | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | $f: (x: a{p x} -> Prims.GTot (y: b{q x y})) -> r: FStar.Ghost.erased a {p (FStar.Ghost.reveal r)}
-> z: FStar.Ghost.erased b {FStar.Ghost.reveal z == f (FStar.Ghost.reveal r)} | {
"end_col": 14,
"end_line": 152,
"start_col": 52,
"start_line": 150
} |
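For elift1_pq in the record above, the co-domain refinement travels with the lifted result; dup and edup below are illustrative names, assuming a module that opens FStar.Ghost.

open FStar.Ghost

let dup (x: nat { x > 0 }) : GTot (y: nat { y >= 2 }) = x + x

(* the result type keeps the equation with dup (reveal r), exactly as elift1_pq states *)
let edup (r: erased nat { reveal r > 0 })
  : (z: erased nat { reveal z == dup (reveal r) })
  = elift1_pq dup r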
Prims.Tot | val elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) =
let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y) | val elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)})
let elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) = | false | null | false | let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal",
"FStar.Ghost.return",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] functions.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot]; F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c)
(** Pushing a refinement type under the [erased] constructor *)
let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) =
let x:(x: a{p x}) = reveal r in
return x
(** Mapping a function with a refined domain over a refined erased value *)
irreducible
let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x)
(** Mapping a binary function with a refined domain over a pair of
refined erased values *)
irreducible
let elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) | [] | FStar.Ghost.elift2_p | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
$f: (xa: a -> xb: b{p xa xb} -> Prims.GTot c) ->
ra: FStar.Ghost.erased a ->
rb: FStar.Ghost.erased b {p (FStar.Ghost.reveal ra) (FStar.Ghost.reveal rb)}
-> rc:
FStar.Ghost.erased c
{FStar.Ghost.reveal rc == f (FStar.Ghost.reveal ra) (FStar.Ghost.reveal rb)} | {
"end_col": 16,
"end_line": 139,
"start_col": 67,
"start_line": 136
} |
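A sketch for elift2_p from the record above, where the domain refinement relates both arguments; sub and esub are illustrative names, assuming a module that opens FStar.Ghost.

open FStar.Ghost

(* truncated subtraction: the precondition ties the second argument to the first *)
let sub (x: nat) (y: nat { y <= x }) : GTot nat = x - y

let esub (rx: erased nat) (ry: erased nat { reveal ry <= reveal rx }) : erased nat
  = elift2_p sub rx ry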
Prims.Tot | val elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)}) | [
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)}) =
let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y) | val elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)})
let elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)}) = | false | null | false | let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y) | {
"checked_file": "FStar.Ghost.fsti.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "FStar.Ghost.fsti"
} | [
"total"
] | [
"FStar.Ghost.erased",
"FStar.Ghost.reveal",
"FStar.Ghost.return",
"Prims.eq2"
] | [] | (*
Copyright 2008-2014 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module FStar.Ghost
/// This module provides an erased type to abstract computationally
/// irrelevant values.
///
/// It relies on the GHOST effect defined in Prims.
///
/// [erased a] is decorated with the erasable attribute. As such,
///
/// 1. The type is considered non-informative.
///
/// So, [Ghost (erased a)] can be subsumed to [Pure (erased a)]
///
/// 2. The compiler extracts [erased a] to [unit]
///
/// The type [erased a] is in a bijection with [a], as
/// witnessed by the [hide] and [reveal] functions.
///
/// Importantly, computationally relevant code cannot use [reveal]
/// (it's marked [GTot])
///
/// Just like Coq's prop, it is okay to use erased types
/// freely as long as we produce an erased type.
///
/// [reveal] and [hide] are coercions: the typechecker will
/// automatically insert them when required. That is, if the type of
/// an expression is [erased X], and the expected type is NOT an
/// [erased Y], it will insert [reveal], and vice versa for [hide].
(** [erased t] is the computationally irrelevant counterpart of [t] *)
[@@ erasable]
new
val erased ([@@@strictly_positive] a: Type u#a) : Type u#a
(** [erased t] is in a bijection with [t], as witnessed by [reveal]
and [hide] *)
val reveal: #a: Type u#a -> erased a -> GTot a
val hide: #a: Type u#a -> a -> Tot (erased a)
val hide_reveal (#a: Type) (x: erased a)
: Lemma (ensures (hide (reveal x) == x)) [SMTPat (reveal x)]
val reveal_hide (#a: Type) (x: a) : Lemma (ensures (reveal (hide x) == x)) [SMTPat (hide x)]
/// The rest of this module includes several well-defined
/// notions. They are not trusted.
(** [Tot] is a sub-effect of [GTot]; F* will usually subsume [Tot]
computations to [GTot] computations, though, occasionally, it may
be useful to apply this coercion explicitly. *)
let tot_to_gtot (f: ('a -> Tot 'b)) (x: 'a) : GTot 'b = f x
(** [erased]: Injecting a value into [erased]; just an alias of [hide] *)
let return (#a: Type) (x: a) : erased a = hide x
(** Sequential composition of erased *)
let bind (#a #b: Type) (x: erased a) (f: (a -> Tot (erased b))) : Tot (erased b) =
let y = reveal x in
f y
unfold
let (let@) (x:erased 'a) (f:('a -> Tot (erased 'b))) : Tot (erased 'b) = bind x f
(** Unary map *)
irreducible
let elift1 (#a #b: Type) (f: (a -> GTot b)) (x: erased a)
: Tot (y: erased b {reveal y == f (reveal x)}) =
let@ xx = x in return (f xx)
(** Binary map *)
irreducible
let elift2 (#a #b #c: Type) (f: (a -> b -> GTot c)) (x: erased a) (y: erased b)
: Tot (z: erased c {reveal z == f (reveal x) (reveal y)}) =
let@ xx = x in
let@ yy = y in
return (f xx yy)
(** Ternary map *)
irreducible
let elift3
(#a #b #c #d: Type)
(f: (a -> b -> c -> GTot d))
(ga: erased a)
(gb: erased b)
(gc: erased c)
: Tot (gd: erased d {reveal gd == f (reveal ga) (reveal gb) (reveal gc)}) =
let@ a = ga in
let@ b = gb in
let@ c = gc in
return (f a b c)
(** Pushing a refinement type under the [erased] constructor *)
let push_refinement #a (#p: (a -> Type0)) (r: erased a {p (reveal r)})
: erased (x: a{p x /\ x == reveal r}) =
let x:(x: a{p x}) = reveal r in
return x
(** Mapping a function with a refined domain over a refined erased value *)
irreducible
let elift1_p
(#a #b: Type)
(#p: (a -> Type))
($f: (x: a{p x} -> GTot b))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x)
(** Mapping a binary function with a refined domain over a pair of
refined erased values *)
irreducible
let elift2_p
(#a #b #c: Type)
(#p: (a -> b -> Type))
($f: (xa: a -> xb: b{p xa xb} -> GTot c))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (rc: erased c {reveal rc == f (reveal ra) (reveal rb)}) =
let x = reveal ra in
let y:(y: b{p x y}) = reveal rb in
return (f x y)
(** Mapping a function with a refined domain and co-domain over a
refined erased value producing a refined erased value *)
irreducible
let elift1_pq
(#a #b: Type)
(#p: (a -> Type))
(#q: (x: a{p x} -> b -> Type))
($f: (x: a{p x} -> GTot (y: b{q x y})))
(r: erased a {p (reveal r)})
: Tot (z: erased b {reveal z == f (reveal r)}) =
let x:(x: a{p x}) = reveal r in
return (f x)
(** Mapping a binary function with a refined domain and co-domain over
a pair of refined erased values producing a refined erased value
*)
irreducible
let elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a) | false | false | FStar.Ghost.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elift2_pq
(#a #b #c: Type)
(#p: (a -> b -> Type))
(#q: (x: a -> y: b{p x y} -> c -> Type))
($f: (x: a -> y: b{p x y} -> GTot (z: c{q x y z})))
(ra: erased a)
(rb: erased b {p (reveal ra) (reveal rb)})
: Tot (z: erased c {reveal z == f (reveal ra) (reveal rb)}) | [] | FStar.Ghost.elift2_pq | {
"file_name": "ulib/FStar.Ghost.fsti",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
$f: (x: a -> y: b{p x y} -> Prims.GTot (z: c{q x y z})) ->
ra: FStar.Ghost.erased a ->
rb: FStar.Ghost.erased b {p (FStar.Ghost.reveal ra) (FStar.Ghost.reveal rb)}
-> z:
FStar.Ghost.erased c {FStar.Ghost.reveal z == f (FStar.Ghost.reveal ra) (FStar.Ghost.reveal rb)} | {
"end_col": 16,
"end_line": 168,
"start_col": 65,
"start_line": 165
} |
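The same example adapted to elift2_pq from the record above, which additionally records the co-domain equation; sub' and esub' are illustrative names, assuming a module that opens FStar.Ghost.

open FStar.Ghost

let sub' (x: nat) (y: nat { y <= x }) : GTot (z: nat { z <= x }) = x - y

let esub' (rx: erased nat) (ry: erased nat { reveal ry <= reveal rx })
  : (z: erased nat { reveal z == sub' (reveal rx) (reveal ry) })
  = elift2_pq sub' rx ry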
Prims.Tot | val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p | val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p = | false | null | false | parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` (parse_tagged_union_payload tag_of_data p) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.and_then",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"Prims.unit",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
: Tot (parser (and_then_kind kt k) data_t) | [] | LowParse.Spec.Combinators.parse_tagged_union | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t))
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind kt k) data_t | {
"end_col": 56,
"end_line": 202,
"start_col": 2,
"start_line": 201
} |
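The parse_tagged_union record above composes a tag parser with a tag-dependent payload parser. A shape-only sketch: msg and tag_of_msg are hypothetical, and the concrete tag/payload parsers are deliberately left abstract in the closing comment.

(* a sum type and a tag function of the shape parse_tagged_union expects *)
type msg =
  | Ping : payload: nat -> msg
  | Pong : payload: nat -> msg

let tag_of_msg (m: msg) : GTot bool = Ping? m

(* given   pt : parser kt bool
   and     p  : (b: bool -> parser k (refine_with_tag tag_of_msg b))
   the combinator above yields
           parse_tagged_union pt tag_of_msg p : parser (and_then_kind kt k) msg,
   which parses the tag with pt and then the matching payload with p b,
   as spelled out by parse_tagged_union_eq further down in the same file. *)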
Prims.Tot | val tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | val tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t')
let tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') = | false | null | false | fun (b: bytes) ->
match p b with
| Some (v, l) ->
let p'v = p' v in
let s':bytes = Seq.slice b l (Seq.length b) in
(match p'v s' with
| Some (v', l') ->
let res:consumed_length b = l + l' in
Some (v', res)
| None -> None)
| None -> None | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.option",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"FStar.Seq.Base.length"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) : | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_and_then_bare (#t #t': Type) (p: tot_bare_parser t) (p': (t -> Tot (tot_bare_parser t')))
: Tot (tot_bare_parser t') | [] | LowParse.Spec.Combinators.tot_and_then_bare | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p: LowParse.Spec.Base.tot_bare_parser t -> p': (_: t -> LowParse.Spec.Base.tot_bare_parser t')
-> LowParse.Spec.Base.tot_bare_parser t' | {
"end_col": 18,
"end_line": 46,
"start_col": 4,
"start_line": 34
} |
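A concrete instance of tot_and_then_bare from the record above, written as it could appear next to that definition (so bytes, Seq, U8 and consumed_length are the ones already in scope there); parse_one_byte and parse_second_byte are illustrative names.

(* a bare parser that consumes exactly one byte *)
let parse_one_byte : tot_bare_parser U8.t =
  fun (b: bytes) -> if Seq.length b >= 1 then Some (Seq.index b 0, 1) else None

(* sequencing two copies consumes 1 + 1 bytes and returns the second byte *)
let parse_second_byte : tot_bare_parser U8.t =
  tot_and_then_bare parse_one_byte (fun _ -> parse_one_byte)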
Prims.Tot | val tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | val tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t')
let tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') = | false | null | false | fun (b: bytes) -> Some (f v, (0 <: consumed_length b)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Bytes.bytes",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.tot_bare_parser"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_fret' (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_bare_parser t') | [] | LowParse.Spec.Combinators.tot_parse_fret' | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | f: (_: t -> t') -> v: t -> LowParse.Spec.Base.tot_bare_parser t' | {
"end_col": 56,
"end_line": 81,
"start_col": 2,
"start_line": 81
} |
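tot_parse_fret' in the record above consumes no input: on any b it returns Some (f v, 0). A one-line use, written as it could appear next to that definition; ret_42 is an illustrative name.

(* always succeeds with 42 and consumes zero bytes *)
let ret_42 : tot_bare_parser nat = tot_parse_fret' (fun (x: nat) -> x) 42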
Prims.Pure | val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f | val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True))
let and_then #k #t p #k' #t' p' = | false | null | false | let f:bare_parser t' = and_then_bare p p' in
and_then_correct p p';
f | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.unit",
"LowParse.Spec.Combinators.and_then_correct",
"LowParse.Spec.Base.bare_parser",
"LowParse.Spec.Combinators.and_then_bare",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'" | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val and_then
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
: Pure (parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun _ -> True)) | [] | LowParse.Spec.Combinators.and_then | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p: LowParse.Spec.Base.parser k t -> p': (_: t -> LowParse.Spec.Base.parser k' t')
-> Prims.Pure (LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind k k') t') | {
"end_col": 3,
"end_line": 15,
"start_col": 33,
"start_line": 12
} |
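The and_then record above carries the Pure precondition and_then_cases_injective p'. A hedged sketch of one way a caller can package that side condition as a refinement on the continuation; and_then_seq is an illustrative name, not part of LowParse, and the open assumes the interface re-exports LowParse.Spec.Base as the include at the top of the file above suggests.

open LowParse.Spec.Combinators

let and_then_seq
  (#k: parser_kind) (#t: Type) (p: parser k t)
  (#k': parser_kind) (#t': Type)
  (p': (t -> Tot (parser k' t')) { and_then_cases_injective p' })
  : Tot (parser (and_then_kind k k') t')
  = p `and_then` p'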
Prims.Tot | val tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None | val tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
let tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) = | false | null | false | fun b ->
match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
(match p2 b' with
| Some (x2, consumed2) -> Some ((x1, x2), consumed1 + consumed2)
| _ -> None)
| _ -> None | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.consumed_length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.None",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_nondep_then_bare (#t1: Type) (p1: tot_bare_parser t1) (#t2: Type) (p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2)) | [] | LowParse.Spec.Combinators.tot_nondep_then_bare | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p1: LowParse.Spec.Base.tot_bare_parser t1 -> p2: LowParse.Spec.Base.tot_bare_parser t2
-> LowParse.Spec.Base.tot_bare_parser (t1 * t2) | {
"end_col": 13,
"end_line": 412,
"start_col": 2,
"start_line": 404
} |
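
A minimal, self-contained sketch of the sequencing pattern captured by the record above: run the first parser, hand the leftover input to the second, and pair the results. It assumes a hypothetical toy_parser abbreviation that threads the remaining input instead of LowParse's consumed-length bookkeeping, and it drops parser kinds and all proof obligations; toy_parser and toy_pair are illustrative names, not library definitions.

(* Sketch only: toy_parser is a hypothetical stand-in for tot_bare_parser.
   It returns the value together with the remaining input, whereas the
   LowParse definition above returns a consumed length and slices the
   input with Seq.slice. *)
let toy_parser (t: Type0) : Type0 = list int -> option (t & list int)

(* Same shape as tot_nondep_then_bare: p1 first, then p2 on what is left. *)
let toy_pair (#t1: Type0) (#t2: Type0)
  (p1: toy_parser t1) (p2: toy_parser t2)
  : toy_parser (t1 & t2)
= fun input ->
    match p1 input with
    | Some (x1, rest1) ->
      begin match p2 rest1 with
      | Some (x2, rest2) -> Some ((x1, x2), rest2)
      | None -> None
      end
    | None -> None
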
Prims.Tot | val tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v | val tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t')
let tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') = | false | null | false | [@@ inline_let ]let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Combinators.tot_parse_fret'",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Base.tot_parser"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_fret (#t #t': Type) (f: (t -> Tot t')) (v: t) : Tot (tot_parser parse_ret_kind t') | [] | LowParse.Spec.Combinators.tot_parse_fret | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | f: (_: t -> t') -> v: t -> LowParse.Spec.Base.tot_parser LowParse.Spec.Combinators.parse_ret_kind t' | {
"end_col": 21,
"end_line": 86,
"start_col": 2,
"start_line": 85
} |
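
The record above shows that tot_parse_fret' succeeds with f v while reporting 0 consumed bytes, and that tot_parse_fret only attaches the parse_ret_kind property to it. A minimal sketch of the same "return" idea, reusing the hypothetical toy_parser abbreviation from the earlier sketch, where consuming nothing corresponds to handing back the input untouched.

(* Sketch only: a return-style toy parser; no kind property is attached. *)
let toy_fret (#t #t': Type0) (f: t -> Tot t') (v: t) : toy_parser t' =
  fun input -> Some (f v, input)
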
Prims.Tot | val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ()) | val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1 -> parser k2 (t2 x)))
(s2: (x: t1 -> serializer (p2 x)))
: Tot (serializer (parse_dtuple2 p1 p2)) = | false | null | false | serialize_tagged_union s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ()) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Combinators.serialize_tagged_union",
"Prims.dtuple2",
"FStar.Pervasives.dfst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.synth_dtuple2",
"LowParse.Spec.Combinators.serialize_synth",
"LowParse.Spec.Combinators.synth_dtuple2_recip",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_dtuple2"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2)) | [] | LowParse.Spec.Combinators.serialize_dtuple2 | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: (x: t1 -> LowParse.Spec.Base.serializer (p2 x))
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_dtuple2 p1 p2) | {
"end_col": 95,
"end_line": 317,
"start_col": 2,
"start_line": 314
} |
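
The record above defines serialize_dtuple2 through serialize_tagged_union keyed by dfst, and the serialize_dtuple2_eq lemma in the surrounding file states that the output is the serialization of the first component appended to that of the second. A minimal sketch of that concatenation layout, assuming a hypothetical toy_serializer abbreviation with none of the kind, refinement, or correctness structure; FStar.List.Tot.append is ordinary list concatenation.

(* Sketch only: toy_serializer is a hypothetical stand-in for serializer,
   with no attached parser and no correctness proof. *)
let toy_serializer (t: Type0) : Type0 = t -> list int

(* A dependent pair is written first-component-first; the serializer of the
   second component may depend on the value of the first. *)
let toy_serialize_dtuple2
  (#t1: Type0) (#t2: (t1 -> Tot Type0))
  (s1: toy_serializer t1)
  (s2: (x: t1) -> toy_serializer (t2 x))
  : toy_serializer (dtuple2 t1 t2)
= fun xy -> FStar.List.Tot.append (s1 (dfst xy)) (s2 (dfst xy) (dsnd xy))
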
Prims.Pure | val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s | val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(s: (t: tag_t -> Tot (serializer (p t))))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) = | false | null | false | bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.bare_serialize_tagged_union",
"Prims.unit",
"LowParse.Spec.Combinators.bare_serialize_tagged_union_correct",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_tagged_union",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"Prims.l_True"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True)) | [] | LowParse.Spec.Combinators.serialize_tagged_union | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
st: LowParse.Spec.Base.serializer pt ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
s: (t: tag_t -> LowParse.Spec.Base.serializer (p t))
-> Prims.Pure
(LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p)) | {
"end_col": 46,
"end_line": 285,
"start_col": 2,
"start_line": 284
} |
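
serialize_tagged_union in the record above is stated to agree with bare_serialize_tagged_union and requires the tag parser's kind to have subkind ParserStrong; the serialize_dtuple2_eq lemma earlier makes the resulting layout explicit for the dfst tag, namely the serialized tag followed by the payload. A minimal sketch of that layout only, reusing the hypothetical toy_serializer abbreviation from the earlier sketch; it ignores the refine_with_tag refinement on the payload serializer and the ParserStrong requirement, which matter for the correctness proof rather than for the layout.

(* Sketch only: tag-then-payload layout, with no refinement on the payload
   serializer and no prefix-freedom requirement on the tag. *)
let toy_serialize_tagged_union
  (#tag_t: Type0) (#data_t: Type0)
  (st: toy_serializer tag_t)
  (tag_of_data: (data_t -> Tot tag_t))
  (s: (t: tag_t) -> toy_serializer data_t)
  : toy_serializer data_t
= fun d -> FStar.List.Tot.append (st (tag_of_data d)) (s (tag_of_data d) d)
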
Prims.Pure | val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f | val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
))
let tot_and_then #k #t p #k' #t' p' = | false | null | false | let f:tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p';
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_ext",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.and_then_bare",
"LowParse.Spec.Combinators.and_then_correct",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Combinators.tot_and_then_bare"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_and_then
(#k: parser_kind)
(#t:Type)
(p:tot_parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (tot_parser k' t')))
: Pure (tot_parser (and_then_kind k k') t')
(requires (
and_then_cases_injective p'
))
(ensures (fun y ->
forall x . parse y x == parse (and_then #k p #k' p') x
)) | [] | LowParse.Spec.Combinators.tot_and_then | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p: LowParse.Spec.Base.tot_parser k t -> p': (_: t -> LowParse.Spec.Base.tot_parser k' t')
-> Prims.Pure (LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind k k') t') | {
"end_col": 3,
"end_line": 52,
"start_col": 37,
"start_line": 48
} |
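
tot_and_then in the record above is the bind of the combinator library: the value produced by p chooses the parser p' that runs on the rest of the input, and the and_then_cases_injective hypothesis in its precondition is there so that the composite can be shown to be a valid parser. A minimal sketch of the sequencing alone, reusing the hypothetical toy_parser abbreviation from the earlier sketch and omitting that side condition and the and_then_kind computation.

(* Sketch only: bind for toy parsers; the injectivity side condition and the
   kind arithmetic of the real tot_and_then are omitted. *)
let toy_and_then (#t #t': Type0)
  (p: toy_parser t)
  (p': (t -> Tot (toy_parser t')))
  : toy_parser t'
= fun input ->
    match p input with
    | Some (v, rest) -> p' v rest
    | None -> None
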
FStar.Pervasives.Lemma | val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg | val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (input: bytes -> Lemma (parse pt input == parse pt' input)))
(k': (t: tag_t -> Tot parser_kind))
(p': (t: tag_t -> Tot (parser (k' t) (refine_with_tag tag_of_data t))))
(lem_p': (k: tag_t -> input: bytes -> Lemma (parse (p k) input == parse (p' k) input)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input ==
bare_parse_tagged_union pt' tag_of_data k' p' input) = | false | null | true | parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k
#(refine_with_tag tag_of_data tg)
(p tg)
(synth_tagged_union_data tag_of_data tg)
input_tg;
lem_p' tg input_tg | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"Prims.Nil",
"FStar.Pervasives.pattern",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Combinators.and_then_eq",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"LowParse.Spec.Combinators.parse_tagged_union",
"LowParse.Spec.Combinators.bare_parse_tagged_union"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input) | [] | LowParse.Spec.Combinators.parse_tagged_union_eq_gen | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
pt': LowParse.Spec.Base.parser kt' tag_t ->
lem_pt:
(input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Base.parse pt input == LowParse.Spec.Base.parse pt' input)) ->
k': (t: tag_t -> LowParse.Spec.Base.parser_kind) ->
p':
(t: tag_t
-> LowParse.Spec.Base.parser (k' t) (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
lem_p':
(k: tag_t -> input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.Base.parse (p k) input == LowParse.Spec.Base.parse (p' k) input)) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p) input ==
LowParse.Spec.Combinators.bare_parse_tagged_union pt' tag_of_data k' p' input) | {
"end_col": 22,
"end_line": 265,
"start_col": 2,
"start_line": 257
} |
Prims.Tot | val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ()) | val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) = | false | null | false | serialize_tagged_union s1
fst
(fun x ->
serialize_synth p2
(fun y -> (x, y) <: refine_with_tag fst x)
s2
(fun (xy: refine_with_tag fst x) -> snd xy)
()) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"LowParse.Spec.Combinators.serialize_tagged_union",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Combinators.serialize_synth",
"FStar.Pervasives.Native.snd",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2)) | [] | LowParse.Spec.Combinators.serialize_nondep_then | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.nondep_then p1 p2) | {
"end_col": 126,
"end_line": 432,
"start_col": 2,
"start_line": 429
} |
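Editorial sketch for the row above (not part of the dataset): a minimal example of how serialize_nondep_then is typically applied, pairing two byte serializers. It assumes parse_u8 / serialize_u8 from LowParse.Spec.Int; the module name is hypothetical and the snippet is a sketch, not a verified artifact.

module Example.PairSerializer
open LowParse.Spec.Combinators
open LowParse.Spec.Int

(* Serialize a pair of bytes by serializing each component in sequence.
   parse_u8_kind is a strong (constant-size) kind, so the subkind
   refinement required of the first serializer should be dischargeable. *)
let serialize_u8_pair : serializer (parse_u8 `nondep_then` parse_u8)
  = serialize_u8 `serialize_nondep_then` serialize_u8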
Prims.Tot | val tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_filter_payload
(#t: Type)
(f: (t -> Tot bool))
(v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f))
= let p : tot_bare_parser (parse_filter_refine f) =
if f v
then
let v' : (x: t { f x == true } ) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p | val tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f))
let tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) = | false | null | false | let p:tot_bare_parser (parse_filter_refine f) =
if f v
then
let v':(x: t{f x == true}) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"Prims.bool",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_equiv",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Base.tot_bare_parser",
"LowParse.Spec.Base.tot_weaken",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.tot_parse_ret",
"Prims.eq2",
"LowParse.Spec.Combinators.tot_fail_parser",
"LowParse.Spec.Base.tot_parser"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= ()
let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f)
let parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
(input: bytes)
: Lemma
(parse (parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
= ()
let tot_parse_filter_payload
(#t: Type)
(f: (t -> Tot bool))
(v: t) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_filter_payload (#t: Type) (f: (t -> Tot bool)) (v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f)) | [] | LowParse.Spec.Combinators.tot_parse_filter_payload | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | f: (_: t -> Prims.bool) -> v: t
-> LowParse.Spec.Base.tot_parser LowParse.Spec.Combinators.parse_filter_payload_kind
(LowParse.Spec.Combinators.parse_filter_refine f) | {
"end_col": 3,
"end_line": 689,
"start_col": 1,
"start_line": 681
} |
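Editorial sketch for the row above (not part of the dataset): restricting a byte parser with parse_filter, the combinator whose payload parser this row defines. It assumes parse_u8 from LowParse.Spec.Int; is_even_u8 and the module name are hypothetical, and the snippet is a sketch rather than a checked definition.

module Example.FilterEven
open LowParse.Spec.Combinators
open LowParse.Spec.Int
module U8 = FStar.UInt8

(* Hypothetical predicate: keep only even byte values. *)
let is_even_u8 (x: U8.t) : GTot bool = U8.v x % 2 = 0

(* Accept a byte only when the predicate holds; the result value lives in
   the refinement type parse_filter_refine is_even_u8. *)
let parse_even_u8
  : parser (parse_filter_kind parse_u8_kind) (parse_filter_refine is_even_u8)
  = parse_u8 `parse_filter` is_even_u8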
Prims.Tot | val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x)) | val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) = | false | null | false | parse_tagged_union p1 fst (fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Combinators.parse_tagged_union",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.fst",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Base.refine_with_tag",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2)) | [] | LowParse.Spec.Combinators.nondep_then | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p1: LowParse.Spec.Base.parser k1 t1 -> p2: LowParse.Spec.Base.parser k2 t2
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.and_then_kind k1 k2) (t1 * t2) | {
"end_col": 72,
"end_line": 370,
"start_col": 2,
"start_line": 367
} |
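Editorial sketch for the row above (not part of the dataset): the basic use of nondep_then to sequence two parsers into a pair parser. It assumes parse_u8 from LowParse.Spec.Int; the module name is hypothetical and the snippet is a sketch under those assumptions.

module Example.PairParser
open LowParse.Spec.Combinators
open LowParse.Spec.Int
module U8 = FStar.UInt8

(* Run parse_u8 twice in sequence and return both results as a pair;
   the resulting kind is the conjunction of the two input kinds. *)
let parse_u8_pair : parser (and_then_kind parse_u8_kind parse_u8_kind) (U8.t * U8.t)
  = parse_u8 `nondep_then` parse_u8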
Prims.Tot | val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1 | val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
let serialize_synth
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
: Tot (serializer (parse_synth p1 f2)) = | false | null | false | bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1 | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"LowParse.Spec.Combinators.bare_serialize_synth",
"LowParse.Spec.Combinators.bare_serialize_synth_correct",
"LowParse.Spec.Combinators.parse_synth"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
}) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2)) | [] | LowParse.Spec.Combinators.serialize_synth | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u241:
u244:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 }
-> LowParse.Spec.Base.serializer (LowParse.Spec.Combinators.parse_synth p1 f2) | {
"end_col": 34,
"end_line": 109,
"start_col": 2,
"start_line": 108
} |
FStar.Pervasives.Lemma | val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s')) = | false | null | true | assert (forall w w'. f2 w == f2 w' ==> w == w');
assert (forall w. f2 (g1 w) == w) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.l_Forall",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Base.seq_upd_bw_seq",
"Prims.squash",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.serialize_synth",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
)) | [] | LowParse.Spec.Combinators.serialize_synth_upd_bw_chain | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u299:
u314:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 } ->
x1: t1 ->
x2: t2 ->
y1: t1 ->
y2: t2 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s = LowParse.Spec.Base.serialize s1 x1 in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize s1 y1 == LowParse.Spec.Base.seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u299
)
x2
in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
FStar.Seq.Base.length s == FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 x1) /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u299)
y2 ==
LowParse.Spec.Base.seq_upd_bw_seq s i' s')) | {
"end_col": 36,
"end_line": 198,
"start_col": 2,
"start_line": 197
} |
FStar.Pervasives.Lemma | val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w) | val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1 #t2: Type)
(p1: parser k t1)
(f2: (t1 -> GTot t2))
(s1: serializer p1)
(g1: (t2 -> GTot t1))
(u: unit{synth_inverse f2 g1 /\ synth_injective f2})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\ serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\ Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s')) = | false | null | true | assert (forall w w'. f2 w == f2 w' ==> w == w');
assert (forall w. f2 (g1 w) == w) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.unit",
"Prims.l_and",
"LowParse.Spec.Combinators.synth_inverse",
"LowParse.Spec.Combinators.synth_injective",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.l_Forall",
"Prims.eq2",
"Prims.l_imp",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.squash",
"LowParse.Spec.Combinators.parse_synth",
"LowParse.Spec.Combinators.serialize_synth",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
)) | [] | LowParse.Spec.Combinators.serialize_synth_upd_chain | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
p1: LowParse.Spec.Base.parser k t1 ->
f2: (_: t1 -> Prims.GTot t2) ->
s1: LowParse.Spec.Base.serializer p1 ->
g1: (_: t2 -> Prims.GTot t1) ->
u285:
u300:
Prims.unit
{ LowParse.Spec.Combinators.synth_inverse f2 g1 /\
LowParse.Spec.Combinators.synth_injective f2 } ->
x1: t1 ->
x2: t2 ->
y1: t1 ->
y2: t2 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s = LowParse.Spec.Base.serialize s1 x1 in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize s1 y1 == LowParse.Spec.Base.seq_upd_seq s i' s' /\
x2 == f2 x1 /\ y2 == f2 y1))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u285
)
x2
in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
FStar.Seq.Base.length s == FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 x1) /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_synth p1 f2 s1 g1 u285)
y2 ==
LowParse.Spec.Base.seq_upd_seq s i' s')) | {
"end_col": 36,
"end_line": 162,
"start_col": 2,
"start_line": 161
} |
Prims.Pure | val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1)) | val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
let parse_synth (#k: parser_kind) (#t1 #t2: Type) (p1: parser k t1) (f2: (t1 -> GTot t2))
: Pure (parser k t2) (requires (synth_injective f2)) (ensures (fun _ -> True)) = | false | null | false | coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.and_then",
"LowParse.Spec.Combinators.parse_fret",
"LowParse.Spec.Combinators.synth_injective",
"Prims.l_True"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True)) | [] | LowParse.Spec.Combinators.parse_synth | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p1: LowParse.Spec.Base.parser k t1 -> f2: (_: t1 -> Prims.GTot t2)
-> Prims.Pure (LowParse.Spec.Base.parser k t2) | {
"end_col": 65,
"end_line": 65,
"start_col": 2,
"start_line": 65
} |
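A minimal sketch for the parse_synth record above, again assuming LowParse.Spec.Combinators is open; parse_tagged and the nat tag are illustrative names only:

(* Pair every parsed value with a fixed tag. The synth function is a tuple
   constructor, so its synth_injective side condition is discharged by the
   solver in the same way as in nondep_then's own definition. *)
let parse_tagged (#k: parser_kind) (#t: Type) (tag: nat) (p: parser k t)
  : Tot (parser k (nat * t))
= parse_synth p (fun x -> (tag, x))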
Prims.Pure | val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2 | val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
))
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 = | false | null | false | Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2 | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Combinators.tot_nondep_then_bare",
"Prims.unit",
"LowParse.Spec.Base.parser_kind_prop_ext",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"FStar.Classical.forall_intro",
"LowParse.Bytes.bytes",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Base.parse",
"FStar.Seq.Base.slice",
"LowParse.Bytes.byte",
"FStar.Seq.Base.length",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"FStar.Pervasives.Native.None",
"LowParse.Spec.Combinators.nondep_then_eq"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: tot_parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: tot_parser k2 t2)
: Pure (tot_parser (and_then_kind k1 k2) (t1 * t2))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (nondep_then #k1 p1 #k2 p2) x
)) | [] | LowParse.Spec.Combinators.tot_nondep_then | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p1: LowParse.Spec.Base.tot_parser k1 t1 -> p2: LowParse.Spec.Base.tot_parser k2 t2
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind k1 k2) (t1 * t2)) | {
"end_col": 28,
"end_line": 417,
"start_col": 2,
"start_line": 415
} |
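A matching sketch for the tot_nondep_then record above, under the same assumptions (LowParse.Spec.Combinators open; names illustrative). The Pure annotation below simply weakens the postcondition recorded in the val:

(* Total variant of the pairing sketch: tot_nondep_then has a trivial
   precondition, so it can be applied directly to any two total parsers. *)
let tot_parse_pair (#k: parser_kind) (#t: Type) (p: tot_parser k t)
  : Pure (tot_parser (and_then_kind k k) (t * t)) (requires True) (ensures (fun _ -> True))
= tot_nondep_then p p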
Prims.Tot | val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f) | val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
let parse_filter (#k: parser_kind) (#t: Type) (p: parser k t) (f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) = | false | null | false | p `and_then` (parse_filter_payload f) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"total"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"Prims.bool",
"LowParse.Spec.Combinators.and_then",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.parse_filter_payload",
"LowParse.Spec.Combinators.parse_filter_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
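On the serializer side, a tagged union is written as the tag followed by the payload chosen by that tag; the ParserStrong requirement on the tag parser is what makes this concatenation unambiguous. A sketch under the same simplified model, where a serializer is just a function to bytes (illustrative names, not the F* API):

type 'a serializer = 'a -> bytes

let serialize_tagged_union
    (st : 'tag serializer)
    (tag_of_data : 'data -> 'tag)
    (s : 'tag -> 'data serializer) : 'data serializer =
  fun d ->
    let tg = tag_of_data d in
    Bytes.cat (st tg) (s tg d)  (* tag bytes, then payload bytes *)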
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
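A tiny usage sketch of the pair combinator just defined, under the same simplified bare-parser model: a one-byte parser paired with itself consumes two bytes and returns both values. parse_u8 below is a stand-in written for the example, not the library's integer parser.

type 'a parser = bytes -> ('a * int) option

let parse_u8 : int parser =
  fun b -> if Bytes.length b >= 1 then Some (Char.code (Bytes.get b 0), 1) else None

let nondep_then (p1 : 'a parser) (p2 : 'b parser) : ('a * 'b) parser =
  fun b ->
    match p1 b with
    | None -> None
    | Some (x1, n1) ->
      (match p2 (Bytes.sub b n1 (Bytes.length b - n1)) with
       | None -> None
       | Some (x2, n2) -> Some ((x1, x2), n1 + n2))

let () =
  match nondep_then parse_u8 parse_u8 (Bytes.of_string "\x01\x02") with
  | Some ((1, 2), 2) -> print_endline "ok: parsed (1, 2), consumed 2 bytes"
  | _ -> print_endline "unexpected"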
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
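The update-left lemma above captures a byte-level fact: since the pair serializes to left-bytes followed by right-bytes, replacing the first component with one of equal serialized length is the same as patching those leading bytes in place (seq_upd_seq at offset 0). A sketch of that patch in OCaml, assuming the replacement is no longer than the buffer:

(* functionally overwrite the first (Bytes.length patch) bytes of s with patch *)
let upd_left (s : bytes) (patch : bytes) : bytes =
  let r = Bytes.copy s in
  Bytes.blit patch 0 r 0 (Bytes.length patch);
  r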
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= ()
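The filter combinator declared next parses a value and then keeps it only if the predicate holds; in F* the result type is refined by the predicate, which the sketch below cannot express. A minimal OCaml sketch of the runtime behaviour, same simplified model:

type 'a parser = bytes -> ('a * int) option

let parse_filter (p : 'a parser) (f : 'a -> bool) : 'a parser =
  fun b ->
    match p b with
    | Some (v, l) when f v -> Some (v, l)
    | _ -> None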
let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool)) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f)) | [] | LowParse.Spec.Combinators.parse_filter | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p: LowParse.Spec.Base.parser k t -> f: (_: t -> Prims.GTot Prims.bool)
-> LowParse.Spec.Base.parser (LowParse.Spec.Combinators.parse_filter_kind k)
(LowParse.Spec.Combinators.parse_filter_refine f) | {
"end_col": 39,
"end_line": 658,
"start_col": 2,
"start_line": 658
} |
FStar.Pervasives.Lemma | val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg | val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t -> Tot (parser k (refine_with_tag tag_of_data t))))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input ==
(match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None)) = | false | null | true | parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k
#(refine_with_tag tag_of_data tg)
(p tg)
(synth_tagged_union_data tag_of_data tg)
input_tg | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Bytes.bytes",
"LowParse.Spec.Base.parse",
"LowParse.Spec.Base.consumed_length",
"LowParse.Spec.Combinators.parse_synth_eq",
"LowParse.Spec.Combinators.synth_tagged_union_data",
"FStar.Seq.Base.seq",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.unit",
"LowParse.Spec.Combinators.and_then_eq",
"LowParse.Spec.Combinators.parse_tagged_union_payload",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.parse_tagged_union",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.Some",
"FStar.Pervasives.Native.Mktuple2",
"Prims.op_Addition",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
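and_then and tot_and_then are the monadic bind on parsers: run p, hand the parsed value to p', run that on the remaining input, and add the consumed lengths (the and_then_cases_injective side condition is a proof obligation, not runtime behaviour). A sketch under a simplified bare-parser model, with illustrative names:

type 'a parser = bytes -> ('a * int) option

let and_then (p : 'a parser) (p' : 'a -> 'b parser) : 'b parser =
  fun b ->
    match p b with
    | None -> None
    | Some (v, l) ->
      (match p' v (Bytes.sub b l (Bytes.length b - l)) with
       | None -> None
       | Some (v', l') -> Some (v', l + l'))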
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
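parse_synth maps a function over a parser's result without consuming any extra input; the synth_injective requirement keeps the combinator a valid (injective) parser and is discharged as a proof obligation rather than checked at runtime. A sketch, same simplified model:

type 'a parser = bytes -> ('a * int) option

let parse_synth (p1 : 'a parser) (f2 : 'a -> 'b) : 'b parser =
  fun b ->
    match p1 b with
    | None -> None
    | Some (v, l) -> Some (f2 v, l)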
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
)) | [] | LowParse.Spec.Combinators.parse_tagged_union_eq | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
pt: LowParse.Spec.Base.parser kt tag_t ->
tag_of_data: (_: data_t -> Prims.GTot tag_t) ->
p: (t: tag_t -> LowParse.Spec.Base.parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t)) ->
input: LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(ensures
LowParse.Spec.Base.parse (LowParse.Spec.Combinators.parse_tagged_union pt tag_of_data p) input ==
((match LowParse.Spec.Base.parse pt input with
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None
| FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ tg consumed_tg) ->
let input_tg = FStar.Seq.Base.slice input consumed_tg (FStar.Seq.Base.length input) in
(match LowParse.Spec.Base.parse (p tg) input_tg with
| FStar.Pervasives.Native.Some
#_
(FStar.Pervasives.Native.Mktuple2 #_ #_ x consumed_x) ->
FStar.Pervasives.Native.Some (x, consumed_tg + consumed_x)
| FStar.Pervasives.Native.None #_ -> FStar.Pervasives.Native.None)
<:
FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input))
<:
FStar.Pervasives.Native.option (data_t * LowParse.Spec.Base.consumed_length input))) | {
"end_col": 112,
"end_line": 229,
"start_col": 2,
"start_line": 223
} |
Prims.Pure | val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1)) | val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
))
let tot_parse_synth #k #t1 #t2 p1 f2 = | false | null | false | coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.coerce",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.parse_ret_kind",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.tot_parse_fret"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: tot_parser k t1)
(f2: t1 -> Tot t2)
: Pure (tot_parser k t2)
(requires (
synth_injective f2
))
(ensures (fun y ->
forall x . parse y x == parse (parse_synth #k p1 f2) x
)) | [] | LowParse.Spec.Combinators.tot_parse_synth | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p1: LowParse.Spec.Base.tot_parser k t1 -> f2: (_: t1 -> t2)
-> Prims.Pure (LowParse.Spec.Base.tot_parser k t2) | {
"end_col": 77,
"end_line": 90,
"start_col": 2,
"start_line": 90
} |
Prims.Pure | val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p | val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
))
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p = | false | null | false | parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` (tot_parse_tagged_union_payload tag_of_data p) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"LowParse.Spec.Base.refine_with_tag",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.tot_parse_tagged_union_payload",
"Prims.unit",
"LowParse.Spec.Combinators.parse_tagged_union_payload_and_then_cases_injective",
"LowParse.Spec.Combinators.and_then_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(pt: tot_parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> Tot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (tot_parser k (refine_with_tag tag_of_data t)))
: Pure (tot_parser (and_then_kind kt k) data_t)
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_tagged_union #kt pt tag_of_data #k p) x
)) | [] | LowParse.Spec.Combinators.tot_parse_tagged_union | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
pt: LowParse.Spec.Base.tot_parser kt tag_t ->
tag_of_data: (_: data_t -> tag_t) ->
p:
(t: tag_t
-> LowParse.Spec.Base.tot_parser k (LowParse.Spec.Base.refine_with_tag tag_of_data t))
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.and_then_kind kt k) data_t) | {
"end_col": 64,
"end_line": 269,
"start_col": 2,
"start_line": 268
} |
Prims.Pure | val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let tot_parse_filter
#k #t p f
= p `tot_and_then` (tot_parse_filter_payload f) | val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
))
let tot_parse_filter #k #t p f = | false | null | false | p `tot_and_then` (tot_parse_filter_payload f) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.tot_parser",
"Prims.bool",
"LowParse.Spec.Combinators.tot_and_then",
"LowParse.Spec.Combinators.parse_filter_payload_kind",
"LowParse.Spec.Combinators.parse_filter_refine",
"LowParse.Spec.Combinators.tot_parse_filter_payload",
"LowParse.Spec.Combinators.parse_filter_kind"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
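(* Editor's note (an assumption about LowParse.Spec.Base, stated for clarity):
   seq_upd_bw_seq s i s' is the backward-indexed update, writing s' at forward
   offset Seq.length s - i - Seq.length s'. With i = len2 and
   Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x)), that
   forward offset is 0, which is why the forward-offset lemma
   serialize_nondep_then_upd_left above discharges this goal directly. *)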
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_right_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s2' = serialize s2 (snd x) in
i' + Seq.length s' <= Seq.length s2' /\
serialize s2 y == seq_upd_seq s2' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.length s == l1 + Seq.length (serialize s2 (snd x)) /\
l1 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (l1 + i') s'
))
= serialize_nondep_then_upd_right s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s2' = serialize s2 (snd x) in
let l2 = Seq.length s - Seq.length s2' in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) s2';
seq_upd_seq_right_to_left s l2 s2' i' s';
seq_upd_seq_slice_idem s l2 (Seq.length s)
#reset-options "--z3rlimit 32 --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let make_total_constant_size_parser_compose
(sz: nat)
(t1 t2: Type)
(f1: ((s: bytes {Seq.length s == sz}) -> GTot t1))
(g2: t1 -> GTot t2)
: Lemma
(requires (
make_total_constant_size_parser_precond sz t1 f1 /\
(forall x x' . g2 x == g2 x' ==> x == x')
))
(ensures (
make_total_constant_size_parser_precond sz t1 f1 /\
make_total_constant_size_parser_precond sz t2 (f1 `compose` g2) /\
(forall x x' . {:pattern (g2 x); (g2 x')} g2 x == g2 x' ==> x == x') /\
(forall input . {:pattern (parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input)} parse (make_total_constant_size_parser sz t2 (f1 `compose` g2)) input == parse (make_total_constant_size_parser sz t1 f1 `parse_synth` g2) input)
))
= ()
let parse_filter
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
: Tot (parser (parse_filter_kind k) (parse_filter_refine f))
= p `and_then` (parse_filter_payload f)
let parse_filter_eq
(#k: parser_kind)
(#t: Type)
(p: parser k t)
(f: (t -> GTot bool))
(input: bytes)
: Lemma
(parse (parse_filter p f) input == (match parse p input with
| None -> None
| Some (x, consumed) ->
if f x
then Some (x, consumed)
else None
))
= ()
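(* Editorial sketch, not part of the original source: a typical use of
   parse_filter. Assuming LowParse.Spec.Int.parse_u8 : parser parse_u8_kind U8.t
   (not opened in this module, hence kept as a comment):

     let parse_nonzero_u8 = parse_u8 `parse_filter` (fun (x: U8.t) -> x <> 0uy)

   By parse_filter_eq, this parser behaves like parse_u8 but returns None
   whenever the parsed byte is 0uy. *)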
let tot_parse_filter_payload
(#t: Type)
(f: (t -> Tot bool))
(v: t)
: Tot (tot_parser parse_filter_payload_kind (parse_filter_refine f))
= let p : tot_bare_parser (parse_filter_refine f) =
if f v
then
let v' : (x: t { f x == true } ) = v in
tot_weaken parse_filter_payload_kind (tot_parse_ret v')
else tot_fail_parser parse_filter_payload_kind (parse_filter_refine f)
in
parser_kind_prop_equiv parse_filter_payload_kind p;
p
let tot_parse_filter | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val tot_parse_filter
(#k: parser_kind)
(#t: Type)
(p: tot_parser k t)
(f: (t -> Tot bool))
: Pure (tot_parser (parse_filter_kind k) (parse_filter_refine f))
(requires True)
(ensures (fun y ->
forall x . parse y x == parse (parse_filter #k p f) x
)) | [] | LowParse.Spec.Combinators.tot_parse_filter | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | p: LowParse.Spec.Base.tot_parser k t -> f: (_: t -> Prims.bool)
-> Prims.Pure
(LowParse.Spec.Base.tot_parser (LowParse.Spec.Combinators.parse_filter_kind k)
(LowParse.Spec.Combinators.parse_filter_refine f)) | {
"end_col": 47,
"end_line": 693,
"start_col": 2,
"start_line": 693
} |
FStar.Pervasives.Lemma | val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x)) | val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y))) = | false | null | true | let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1)
(Seq.slice s l1 (Seq.length s))
(serialize s1 (fst x))
(serialize s2 (snd x)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.lemma_append_inj",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"FStar.Seq.Properties.lemma_split",
"Prims.nat",
"LowParse.Spec.Base.seq_upd_seq_left",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
)) | [] | LowParse.Spec.Combinators.serialize_nondep_then_upd_left | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y, FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_seq s 0 (LowParse.Spec.Base.serialize s1 y))) | {
"end_col": 119,
"end_line": 485,
"start_col": 1,
"start_line": 481
} |
FStar.Pervasives.Lemma | val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y | val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma (requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) ==
seq_upd_bw_seq s len2 (serialize s1 y))) = | false | null | true | serialize_nondep_then_upd_left s1 s2 x y | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"LowParse.Spec.Combinators.serialize_nondep_then_upd_left",
"Prims.unit",
"Prims.nat",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Pervasives.Native.snd",
"LowParse.Spec.Base.seq_upd_bw_seq",
"LowParse.Bytes.bytes",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 16,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
)) | [] | LowParse.Spec.Combinators.serialize_nondep_then_upd_bw_left | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
let len2 =
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x))
in
len2 + FStar.Seq.Base.length (LowParse.Spec.Base.serialize s1 y) <=
FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y, FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_bw_seq s len2 (LowParse.Spec.Base.serialize s1 y))) | {
"end_col": 42,
"end_line": 539,
"start_col": 2,
"start_line": 539
} |
FStar.Pervasives.Lemma | val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s') | val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i': nat)
(s': bytes)
: Lemma
(requires
(let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\ serialize s1 y == seq_upd_bw_seq s1' i' s'))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s')) = | false | null | true | let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' ==
Seq.length (serialize (serialize_nondep_then s1 s2) x) -
(Seq.length (serialize s2 (snd x)) + i') -
Seq.length s') | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"Prims.nat",
"LowParse.Bytes.bytes",
"Prims._assert",
"Prims.int",
"Prims.op_Subtraction",
"FStar.Seq.Base.length",
"LowParse.Bytes.byte",
"LowParse.Spec.Base.serialize",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.op_Addition",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"LowParse.Spec.Combinators.serialize_nondep_then_upd_left_chain",
"FStar.Pervasives.Native.fst",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"LowParse.Spec.Base.seq_upd_bw_seq",
"Prims.squash",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s' | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": true,
"z3rlimit": 64,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
)) | [] | LowParse.Spec.Combinators.serialize_nondep_then_upd_bw_left_chain | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t1 ->
i': Prims.nat ->
s': LowParse.Bytes.bytes
-> FStar.Pervasives.Lemma
(requires
(let s1' = LowParse.Spec.Base.serialize s1 (FStar.Pervasives.Native.fst x) in
i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s1' /\
LowParse.Spec.Base.serialize s1 y == LowParse.Spec.Base.seq_upd_bw_seq s1' i' s'))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
let len2 =
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x))
in
len2 + i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(y, FStar.Pervasives.Native.snd x) ==
LowParse.Spec.Base.seq_upd_bw_seq s (len2 + i') s')) | {
"end_col": 130,
"end_line": 570,
"start_col": 1,
"start_line": 568
} |
FStar.Pervasives.Lemma | val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
)) | [
{
"abbrev": true,
"full_module": "FStar.Tactics",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": false,
"full_module": "LowParse.Spec.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2) (Seq.slice s l2 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x)) | val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
))
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 {k1.parser_kind_subkind == Some ParserStrong})
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma (requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures
(let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) ==
seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y))) = | false | null | true | let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_right s (serialize s2 y);
let l2 = Seq.length s - Seq.length (serialize s2 (snd x)) in
Seq.lemma_split s l2;
Seq.lemma_append_inj (Seq.slice s 0 l2)
(Seq.slice s l2 (Seq.length s))
(serialize s1 (fst x))
(serialize s2 (snd x)) | {
"checked_file": "LowParse.Spec.Combinators.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Base.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Tactics.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Classical.fsti.checked"
],
"interface_file": true,
"source_file": "LowParse.Spec.Combinators.fst"
} | [
"lemma"
] | [
"LowParse.Spec.Base.parser_kind",
"LowParse.Spec.Base.parser",
"LowParse.Spec.Base.serializer",
"Prims.eq2",
"FStar.Pervasives.Native.option",
"LowParse.Spec.Base.parser_subkind",
"LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind",
"FStar.Pervasives.Native.Some",
"LowParse.Spec.Base.ParserStrong",
"FStar.Pervasives.Native.tuple2",
"FStar.Seq.Properties.lemma_append_inj",
"LowParse.Bytes.byte",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowParse.Spec.Base.serialize",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.snd",
"Prims.unit",
"FStar.Seq.Properties.lemma_split",
"Prims.int",
"Prims.op_Subtraction",
"LowParse.Spec.Base.seq_upd_seq_right",
"LowParse.Bytes.bytes",
"LowParse.Spec.Combinators.and_then_kind",
"LowParse.Spec.Combinators.nondep_then",
"LowParse.Spec.Combinators.serialize_nondep_then",
"Prims.nat",
"Prims.squash",
"Prims.l_and",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.Seq.Base.seq",
"FStar.Pervasives.Native.Mktuple2",
"LowParse.Spec.Base.seq_upd_seq",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.Combinators
include LowParse.Spec.Base
module Seq = FStar.Seq
module U8 = FStar.UInt8
module U32 = FStar.UInt32
module T = FStar.Tactics
#reset-options "--using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let and_then #k #t p #k' #t' p' =
let f : bare_parser t' = and_then_bare p p' in
and_then_correct p p' ;
f
let and_then_eq
(#k: parser_kind)
(#t:Type)
(p:parser k t)
(#k': parser_kind)
(#t':Type)
(p': (t -> Tot (parser k' t')))
(input: bytes)
: Lemma
(requires (and_then_cases_injective p'))
(ensures (parse (and_then p p') input == and_then_bare p p' input))
= ()
let tot_and_then_bare (#t:Type) (#t':Type)
(p:tot_bare_parser t)
(p': (t -> Tot (tot_bare_parser t'))) :
Tot (tot_bare_parser t') =
fun (b: bytes) ->
match p b with
| Some (v, l) ->
begin
let p'v = p' v in
let s' : bytes = Seq.slice b l (Seq.length b) in
match p'v s' with
| Some (v', l') ->
let res : consumed_length b = l + l' in
Some (v', res)
| None -> None
end
| None -> None
let tot_and_then #k #t p #k' #t' p' =
let f : tot_bare_parser t' = tot_and_then_bare p p' in
and_then_correct #k p #k' p' ;
parser_kind_prop_ext (and_then_kind k k') (and_then_bare p p') f;
f
let parse_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
: Pure (parser k t2)
(requires (
synth_injective f2
))
(ensures (fun _ -> True))
= coerce (parser k t2) (and_then p1 (fun v1 -> parse_fret f2 v1))
let parse_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(b: bytes)
: Lemma
(requires (synth_injective f2))
(ensures (parse (parse_synth p1 f2) b == parse_synth' p1 f2 b))
= ()
unfold
let tot_parse_fret' (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_bare_parser t') =
fun (b: bytes) -> Some (f v, (0 <: consumed_length b))
unfold
let tot_parse_fret (#t #t':Type) (f: t -> Tot t') (v:t) : Tot (tot_parser parse_ret_kind t') =
[@inline_let] let _ = parser_kind_prop_equiv parse_ret_kind (tot_parse_fret' f v) in
tot_parse_fret' f v
let tot_parse_synth
#k #t1 #t2 p1 f2
= coerce (tot_parser k t2) (tot_and_then p1 (fun v1 -> tot_parse_fret f2 v1))
let bare_serialize_synth_correct #k #t1 #t2 p1 f2 s1 g1 =
()
let serialize_synth
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
: Tot (serializer (parse_synth p1 f2))
= bare_serialize_synth_correct p1 f2 s1 g1;
bare_serialize_synth p1 f2 s1 g1
let serialize_synth_eq
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x: t2)
: Lemma
(serialize (serialize_synth p1 f2 s1 g1 u) x == serialize s1 (g1 x))
= ()
let serialize_synth_upd_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let serialize_synth_upd_bw_chain
(#k: parser_kind)
(#t1: Type)
(#t2: Type)
(p1: parser k t1)
(f2: t1 -> GTot t2)
(s1: serializer p1)
(g1: t2 -> GTot t1)
(u: unit {
synth_inverse f2 g1 /\
synth_injective f2
})
(x1: t1)
(x2: t2)
(y1: t1)
(y2: t2)
(i': nat)
(s' : bytes)
: Lemma
(requires (
let s = serialize s1 x1 in
i' + Seq.length s' <= Seq.length s /\
serialize s1 y1 == seq_upd_bw_seq s i' s' /\
x2 == f2 x1 /\
y2 == f2 y1
))
(ensures (
let s = serialize (serialize_synth p1 f2 s1 g1 u) x2 in
i' + Seq.length s' <= Seq.length s /\
Seq.length s == Seq.length (serialize s1 x1) /\
serialize (serialize_synth p1 f2 s1 g1 u) y2 == seq_upd_bw_seq s i' s'
))
= (* I don't know which are THE terms to exhibit among x1, x2, y1, y2 to make the patterns trigger *)
assert (forall w w' . f2 w == f2 w' ==> w == w');
assert (forall w . f2 (g1 w) == w)
let parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
pt `and_then` parse_tagged_union_payload tag_of_data p
let parse_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == (match parse pt input with
| None -> None
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
begin match parse (p tg) input_tg with
| Some (x, consumed_x) -> Some ((x <: data_t), consumed_tg + consumed_x)
| None -> None
end
))
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg
let parse_tagged_union_eq_gen
(#kt: parser_kind)
(#tag_t: Type)
(pt: parser kt tag_t)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(#kt': parser_kind)
(pt': parser kt' tag_t)
(lem_pt: (
(input: bytes) ->
Lemma
(parse pt input == parse pt' input)
))
(k': (t: tag_t) -> Tot parser_kind)
(p': (t: tag_t) -> Tot (parser (k' t) (refine_with_tag tag_of_data t)))
(lem_p' : (
(k: tag_t) ->
(input: bytes) ->
Lemma
(parse (p k) input == parse (p' k) input)
))
(input: bytes)
: Lemma
(parse (parse_tagged_union pt tag_of_data p) input == bare_parse_tagged_union pt' tag_of_data k' p' input)
= parse_tagged_union_payload_and_then_cases_injective tag_of_data p;
and_then_eq pt (parse_tagged_union_payload tag_of_data p) input;
lem_pt input;
match parse pt input with
| None -> ()
| Some (tg, consumed_tg) ->
let input_tg = Seq.slice input consumed_tg (Seq.length input) in
parse_synth_eq #k #(refine_with_tag tag_of_data tg) (p tg) (synth_tagged_union_data tag_of_data tg) input_tg;
lem_p' tg input_tg
let tot_parse_tagged_union #kt #tag_t pt #data_t tag_of_data #k p =
parse_tagged_union_payload_and_then_cases_injective tag_of_data #k p;
pt `tot_and_then` tot_parse_tagged_union_payload tag_of_data p
let serialize_tagged_union
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
: Pure (serializer (parse_tagged_union pt tag_of_data p))
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (fun _ -> True))
= bare_serialize_tagged_union_correct st tag_of_data s;
bare_serialize_tagged_union st tag_of_data s
let serialize_tagged_union_eq
(#kt: parser_kind)
(#tag_t: Type)
(#pt: parser kt tag_t)
(st: serializer pt)
(#data_t: Type)
(tag_of_data: (data_t -> GTot tag_t))
(#k: parser_kind)
(#p: (t: tag_t) -> Tot (parser k (refine_with_tag tag_of_data t)))
(s: (t: tag_t) -> Tot (serializer (p t)))
(input: data_t)
: Lemma
(requires (kt.parser_kind_subkind == Some ParserStrong))
(ensures (serialize (serialize_tagged_union st tag_of_data s) input == bare_serialize_tagged_union st tag_of_data s input))
[SMTPat (serialize (serialize_tagged_union st tag_of_data s) input)]
= ()
let serialize_dtuple2
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
: Tot (serializer (parse_dtuple2 p1 p2))
= serialize_tagged_union
s1
dfst
(fun (x: t1) -> serialize_synth (p2 x) (synth_dtuple2 x) (s2 x) (synth_dtuple2_recip x) ())
let parse_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(p2: (x: t1) -> parser k2 (t2 x))
(b: bytes)
: Lemma
(parse (parse_dtuple2 p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse (p2 x1) b' with
| Some (x2, consumed2) ->
Some ((| x1, x2 |), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%parse_dtuple2;]])
= ()
let serialize_dtuple2_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong })
(#k2: parser_kind)
(#t2: (t1 -> Tot Type))
(#p2: (x: t1) -> parser k2 (t2 x))
(s2: (x: t1) -> serializer (p2 x))
(xy: dtuple2 t1 t2)
: Lemma
(serialize (serialize_dtuple2 s1 s2) xy == serialize s1 (dfst xy) `Seq.append` serialize (s2 (dfst xy)) (dsnd xy))
= ()
(* Special case for non-dependent parsing *)
let nondep_then
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
: Tot (parser (and_then_kind k1 k2) (t1 * t2))
= parse_tagged_union
p1
fst
(fun x -> parse_synth p2 (fun y -> (x, y) <: refine_with_tag fst x))
#set-options "--z3rlimit 16"
let nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(p1: parser k1 t1)
(#k2: parser_kind)
(#t2: Type)
(p2: parser k2 t2)
(b: bytes)
: Lemma
(parse (nondep_then p1 p2) b == (match parse p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match parse p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
))
by (T.norm [delta_only [`%nondep_then;]])
= ()
let tot_nondep_then_bare
(#t1: Type)
(p1: tot_bare_parser t1)
(#t2: Type)
(p2: tot_bare_parser t2)
: Tot (tot_bare_parser (t1 & t2))
= fun b -> match p1 b with
| Some (x1, consumed1) ->
let b' = Seq.slice b consumed1 (Seq.length b) in
begin match p2 b' with
| Some (x2, consumed2) ->
Some ((x1, x2), consumed1 + consumed2)
| _ -> None
end
| _ -> None
let tot_nondep_then #k1 #t1 p1 #k2 #t2 p2 =
Classical.forall_intro (nondep_then_eq #k1 p1 #k2 p2);
parser_kind_prop_ext (and_then_kind k1 k2) (nondep_then #k1 p1 #k2 p2) (tot_nondep_then_bare p1 p2);
tot_nondep_then_bare p1 p2
let serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
: Tot (serializer (nondep_then p1 p2))
= serialize_tagged_union
s1
fst
(fun x -> serialize_synth p2 (fun y -> (x, y) <: refine_with_tag fst x) s2 (fun (xy: refine_with_tag fst x) -> snd xy) ())
let serialize_nondep_then_eq
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input: t1 * t2)
: Lemma
(serialize (serialize_nondep_then s1 s2) input == bare_serialize_nondep_then p1 s1 p2 s2 input)
= ()
let length_serialize_nondep_then
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(input1: t1)
(input2: t2)
: Lemma
(Seq.length (serialize (serialize_nondep_then s1 s2) (input1, input2)) == Seq.length (serialize s1 input1) + Seq.length (serialize s2 input2))
= ()
let serialize_nondep_then_upd_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s 0 (serialize s1 y)
))
= let s = serialize (serialize_nondep_then s1 s2) x in
seq_upd_seq_left s (serialize s1 y);
let l1 = Seq.length (serialize s1 (fst x)) in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) (serialize s1 (fst x)) (serialize s2 (snd x))
let serialize_nondep_then_upd_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_seq s i' s'
))
= serialize_nondep_then_upd_left s1 s2 x y;
let s = serialize (serialize_nondep_then s1 s2) x in
let s1' = serialize s1 (fst x) in
let l1 = Seq.length s1' in
Seq.lemma_split s l1;
Seq.lemma_append_inj (Seq.slice s 0 l1) (Seq.slice s l1 (Seq.length s)) s1' (serialize s2 (snd x));
seq_upd_seq_right_to_left s 0 s1' i' s';
seq_upd_seq_slice_idem s 0 (Seq.length s1')
let serialize_nondep_then_upd_bw_left
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
: Lemma
(requires (Seq.length (serialize s1 y) == Seq.length (serialize s1 (fst x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + Seq.length (serialize s1 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s len2 (serialize s1 y)
))
= serialize_nondep_then_upd_left s1 s2 x y
#reset-options "--z3refresh --z3rlimit 64 --z3cliopt smt.arith.nl=false --using_facts_from '* -FStar.Tactis -FStar.Reflection'"
let serialize_nondep_then_upd_bw_left_chain
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t1)
(i' : nat)
(s' : bytes)
: Lemma
(requires (
let s1' = serialize s1 (fst x) in
i' + Seq.length s' <= Seq.length s1' /\
serialize s1 y == seq_upd_bw_seq s1' i' s'
))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
let len2 = Seq.length (serialize s2 (snd x)) in
len2 + i' + Seq.length s' <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (y, snd x) == seq_upd_bw_seq s (len2 + i') s'
))
= let j' = Seq.length (serialize s1 (fst x)) - i' - Seq.length s' in
serialize_nondep_then_upd_left_chain s1 s2 x y j' s';
assert (j' == Seq.length (serialize (serialize_nondep_then s1 s2) x) - (Seq.length (serialize s2 (snd x)) + i') - Seq.length s')
let serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y) | false | false | LowParse.Spec.Combinators.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.arith.nl=false"
],
"z3refresh": true,
"z3rlimit": 64,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val serialize_nondep_then_upd_right
(#k1: parser_kind)
(#t1: Type)
(#p1: parser k1 t1)
(s1: serializer p1 { k1.parser_kind_subkind == Some ParserStrong } )
(#k2: parser_kind)
(#t2: Type)
(#p2: parser k2 t2)
(s2: serializer p2)
(x: t1 * t2)
(y: t2)
: Lemma
(requires (Seq.length (serialize s2 y) == Seq.length (serialize s2 (snd x))))
(ensures (
let s = serialize (serialize_nondep_then s1 s2) x in
Seq.length (serialize s2 y) <= Seq.length s /\
serialize (serialize_nondep_then s1 s2) (fst x, y) == seq_upd_seq s (Seq.length s - Seq.length (serialize s2 y)) (serialize s2 y)
)) | [] | LowParse.Spec.Combinators.serialize_nondep_then_upd_right | {
"file_name": "src/lowparse/LowParse.Spec.Combinators.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} |
s1:
LowParse.Spec.Base.serializer p1
{ Mkparser_kind'?.parser_kind_subkind k1 ==
FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } ->
s2: LowParse.Spec.Base.serializer p2 ->
x: (t1 * t2) ->
y: t2
-> FStar.Pervasives.Lemma
(requires
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y) ==
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 (FStar.Pervasives.Native.snd x)))
(ensures
(let s =
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2) x
in
FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y) <= FStar.Seq.Base.length s /\
LowParse.Spec.Base.serialize (LowParse.Spec.Combinators.serialize_nondep_then s1 s2)
(FStar.Pervasives.Native.fst x, y) ==
LowParse.Spec.Base.seq_upd_seq s
(FStar.Seq.Base.length s - FStar.Seq.Base.length (LowParse.Spec.Base.serialize s2 y))
(LowParse.Spec.Base.serialize s2 y))) | {
"end_col": 119,
"end_line": 594,
"start_col": 1,
"start_line": 590
} |